<?xml version="1.0" encoding="UTF-8"?>
<rss version="2.0"
     xmlns:content="http://purl.org/rss/1.0/modules/content/"
     xmlns:dc="http://purl.org/dc/elements/1.1/"
     xmlns:dcterms="http://purl.org/dc/terms/"
     xmlns:media="http://search.yahoo.com/mrss/"
     xmlns:atom="http://www.w3.org/2005/Atom"
>
    <channel>
        <atom:link href="https://www.windowscentral.com/feeds/tag/local-ai" rel="self" type="application/rss+xml" />
                    <title><![CDATA[ Latest from Windows Central in Local-ai ]]></title>
                <link>https://www.windowscentral.com/tag/local-ai</link>
        <description><![CDATA[ All the latest local-ai content from the Windows Central team ]]></description>
                                    <lastBuildDate>Mon, 02 Mar 2026 08:00:00 +0000</lastBuildDate>
                            <language>en</language>
                                <item>
                                                            <title><![CDATA[ AMD's new Zen 5 enterprise processors still have some of the best local AI chiplets I've seen on x86-64 — Ryzen AI PRO 400 hits 50-60 TOPS to crush Copilot+ PC certification ]]></title>
                                                                                                                                                                                                <link>https://www.windowscentral.com/hardware/cpus/amd-ryzen-ai-pro-400-series-announcement-mwc-barcelona</link>
                                                                            <description>
                            <![CDATA[ AMD's AI PRO 400 Series is Gorgon Point chips in a suit and tie and repackaged for the enterprise audience, with some of the best x86-64 NPUs I've seen. ]]>
                                                                                                            </description>
                                                                                                                                <guid isPermaLink="false">y7sRiihfeyLviGf9NwCZ4f</guid>
                                                                                                <enclosure url="https://cdn.mos.cms.futurecdn.net/hdEZoNdHdSypYmniXh4WBZ-1280-80.jpg" type="image/jpeg" length="0"></enclosure>
                                                                        <pubDate>Mon, 02 Mar 2026 08:00:00 +0000</pubDate>                                                                                                                                                                                                                                <category><![CDATA[CPUs]]></category>
                                                    <category><![CDATA[Hardware]]></category>
                                                    <category><![CDATA[Desktops]]></category>
                                                    <category><![CDATA[Components]]></category>
                                                                                                <author><![CDATA[ ben.wilson@windowscentral.com (Ben Wilson) ]]></author>                    <dc:creator><![CDATA[ Ben Wilson ]]></dc:creator>                                                                                                    <media:content type="image/jpeg" url="https://cdn.mos.cms.futurecdn.net/hdEZoNdHdSypYmniXh4WBZ-1280-80.jpg">
                                                            <media:credit><![CDATA[AMD, Microsoft | Edited with Gemini]]></media:credit>
                                                                                                                                                                        <media:description><![CDATA[AMD revealed Ryzen AI PRO 400 for MWC Barcelona.]]></media:description>                                                            <media:text><![CDATA[AMD Ryzen AI PRO desktop processor with Microsoft Copilot logo on a computer desk]]></media:text>
                                <media:title type="plain"><![CDATA[AMD Ryzen AI PRO desktop processor with Microsoft Copilot logo on a computer desk]]></media:title>
                                                    </media:content>
                                                    <media:thumbnail url="https://cdn.mos.cms.futurecdn.net/hdEZoNdHdSypYmniXh4WBZ-1280-80.jpg" />
                                                                                        </item>
                                <item>
                                                            <title><![CDATA[ Ollama on Windows 11 vs WSL: two brilliant ways to use local LLMs ]]></title>
                                                                                                                                                                                                <link>https://www.windowscentral.com/artificial-intelligence/ollama-on-wsl-works-just-as-well-as-natively-on-windows-11</link>
                                                                            <description>
                            <![CDATA[ On Windows 11, you can use Ollama either natively or through WSL, with the latter being potentially important for developers. The good news is, it works well. ]]>
                                                                                                            </description>
                                                                                                                                <guid isPermaLink="false">DCdhS4LN29zNobLwv3BnyL</guid>
                                                                                                <enclosure url="https://cdn.mos.cms.futurecdn.net/Z5encHgut2x7aLx5VTxYKo-1280-80.jpg" type="image/jpeg" length="0"></enclosure>
                                                                        <pubDate>Wed, 03 Sep 2025 14:44:00 +0000</pubDate>                                                                                                                                <dcterms:modified>2025-09-04T09:42:08Z</dcterms:modified>
                                                                                                                                            <category><![CDATA[Artificial Intelligence]]></category>
                                                                                                <author><![CDATA[ richard.devine@futurenet.com (Richard Devine) ]]></author>                    <dc:creator><![CDATA[ Richard Devine ]]></dc:creator>                                                                                                    <media:content type="image/jpeg" url="https://cdn.mos.cms.futurecdn.net/Z5encHgut2x7aLx5VTxYKo-1280-80.jpg">
                                                            <media:credit><![CDATA[Windows Central]]></media:credit>
                                                                                                                                                                        <media:description><![CDATA[You have two ways to use Ollama on a Windows 11 PC, and they&#039;re both brilliant.]]></media:description>                                                            <media:text><![CDATA[Ollama running in PowerShell and Ubuntu on WSL]]></media:text>
                                <media:title type="plain"><![CDATA[Ollama running in PowerShell and Ubuntu on WSL]]></media:title>
                                                    </media:content>
                                                    <media:thumbnail url="https://cdn.mos.cms.futurecdn.net/Z5encHgut2x7aLx5VTxYKo-1280-80.jpg" />
                                                                                        </item>
                                <item>
                                                            <title><![CDATA[ Geekom A9 Max review: The mini PC that thinks it’s a gaming rig ]]></title>
                                                                                                                                                                                                <link>https://www.windowscentral.com/hardware/desktops/geekom-a9-max-review</link>
                                                                            <description>
                            <![CDATA[ The Geekom A9 Max mini PC is at the pricier end of the spectrum, but when it's an easy replacement for a desktop PC, even for gaming, it makes so much sense. ]]>
                                                                                                            </description>
                                                                                                                                <guid isPermaLink="false">tbADnep6T7gX8mnJZxAQaQ</guid>
                                                                                                <enclosure url="https://cdn.mos.cms.futurecdn.net/UdkWgXsdNR6Bu2hTPPpQBU-1280-80.jpg" type="image/jpeg" length="0"></enclosure>
                                                                        <pubDate>Wed, 27 Aug 2025 11:03:00 +0000</pubDate>                                                                                                                                <dcterms:modified>2025-12-19T20:39:18Z</dcterms:modified>
                                                                                                                                            <category><![CDATA[Desktops]]></category>
                                                    <category><![CDATA[Hardware]]></category>
                                                                                                <author><![CDATA[ richard.devine@futurenet.com (Richard Devine) ]]></author>                    <dc:creator><![CDATA[ Richard Devine ]]></dc:creator>                                                                                                    <media:content type="image/jpeg" url="https://cdn.mos.cms.futurecdn.net/UdkWgXsdNR6Bu2hTPPpQBU-1280-80.jpg">
                                                            <media:credit><![CDATA[Windows Central]]></media:credit>
                                                                                                                                                                        <media:description><![CDATA[The Geekom A9 Max is small, but mighty. ]]></media:description>                                                            <media:text><![CDATA[Geekom A9 Max mini PC with Windows Central Editor&#039;s Choice badge. ]]></media:text>
                                <media:title type="plain"><![CDATA[Geekom A9 Max mini PC with Windows Central Editor&#039;s Choice badge. ]]></media:title>
                                                    </media:content>
                                                    <media:thumbnail url="https://cdn.mos.cms.futurecdn.net/UdkWgXsdNR6Bu2hTPPpQBU-1280-80.jpg" />
                                                                                        </item>
                                <item>
                                                            <title><![CDATA[ NVIDIA just built a “brain” that can power humanoid robots and physical AI — and it only costs $3,499 ]]></title>
                                                                                                                                                                                                <link>https://www.windowscentral.com/artificial-intelligence/nvidia-just-built-a-brain-that-can-power-humanoid-robots-and-physical-ai-and-it-only-costs-usd3-499</link>
                                                                            <description>
                            <![CDATA[ NVIDIA just announced Jetson Thor, a "robot brain" that powers humanoids, self-driving cars, and smart machines with real-time generative AI. ]]>
                                                                                                            </description>
                                                                                                                                <guid isPermaLink="false">hJp7rtyNGTYfGTS4S2aKbQ</guid>
                                                                                                <enclosure url="https://cdn.mos.cms.futurecdn.net/EKvpBcTK3uSaZcLp2GnyqF-1280-80.jpg" type="image/jpeg" length="0"></enclosure>
                                                                        <pubDate>Tue, 26 Aug 2025 20:34:52 +0000</pubDate>                                                                                                                                <dcterms:modified>2025-08-29T15:43:30Z</dcterms:modified>
                                                                                                                                            <category><![CDATA[Artificial Intelligence]]></category>
                                                                                                <author><![CDATA[ sean.endicott@futurenet.com (Sean Endicott) ]]></author>                    <dc:creator><![CDATA[ Sean Endicott ]]></dc:creator>                                                                                                    <media:content type="image/jpeg" url="https://cdn.mos.cms.futurecdn.net/EKvpBcTK3uSaZcLp2GnyqF-1280-80.jpg">
                                                            <media:credit><![CDATA[Getty Images]]></media:credit>
                                                                                                                                                                        <media:description><![CDATA[Jetson Thor is a robotics computer that can power physical AI devices through real-time reasoning.]]></media:description>                                                            <media:text><![CDATA[Artificial Intelligence robot brain and circuit board on a black background.]]></media:text>
                                <media:title type="plain"><![CDATA[Artificial Intelligence robot brain and circuit board on a black background.]]></media:title>
                                                    </media:content>
                                                    <media:thumbnail url="https://cdn.mos.cms.futurecdn.net/EKvpBcTK3uSaZcLp2GnyqF-1280-80.jpg" />
                                                                                        </item>
                                <item>
                                                            <title><![CDATA[ Just what sort of GPU do you need to run local AI with Ollama? — The answer isn't as expensive as you might think  ]]></title>
                                                                                                                                                                                                <link>https://www.windowscentral.com/artificial-intelligence/just-what-sort-of-gpu-do-you-need-to-run-local-ai-with-ollama-the-answer-isnt-as-expensive-as-you-might-think</link>
                                                                            <description>
                            <![CDATA[ If you're looking at your PC and wondering what sort of GPU you might need to power local LLMs, the good news is it doesn't have to be as expensive as you think. Allow me to explain. ]]>
                                                                                                            </description>
                                                                                                                                <guid isPermaLink="false">iMEspQUhXmHdSzZtYvDgkR</guid>
                                                                                                <enclosure url="https://cdn.mos.cms.futurecdn.net/aT329JYHb8hHZVA2zEikLg-1280-80.jpg" type="image/jpeg" length="0"></enclosure>
                                                                        <pubDate>Mon, 25 Aug 2025 19:30:00 +0000</pubDate>                                                                                                                                <dcterms:modified>2025-08-29T15:37:00Z</dcterms:modified>
                                                                                                                                            <category><![CDATA[Artificial Intelligence]]></category>
                                                                                                <author><![CDATA[ richard.devine@futurenet.com (Richard Devine) ]]></author>                    <dc:creator><![CDATA[ Richard Devine ]]></dc:creator>                                                                                                    <media:content type="image/jpeg" url="https://cdn.mos.cms.futurecdn.net/aT329JYHb8hHZVA2zEikLg-1280-80.jpg">
                                                            <media:credit><![CDATA[Ben Wilson | Windows Central]]></media:credit>
                                                                                                                                                                        <media:description><![CDATA[You don&#039;t NEED to splash the cash on one of these to run local AI with Ollama. ]]></media:description>                                                            <media:text><![CDATA[Razer Blade 18 (2025) on with RGB activated]]></media:text>
                                <media:title type="plain"><![CDATA[Razer Blade 18 (2025) on with RGB activated]]></media:title>
                                                    </media:content>
                                                    <media:thumbnail url="https://cdn.mos.cms.futurecdn.net/aT329JYHb8hHZVA2zEikLg-1280-80.jpg" />
                                                                                        </item>
                                <item>
                                                            <title><![CDATA[ When it comes to running Ollama on your PC for local AI, one thing matters more than most — here's why ]]></title>
                                                                                                                                                                                                <link>https://www.windowscentral.com/artificial-intelligence/when-it-comes-to-running-ollama-on-your-pc-for-local-ai-one-thing-matters-more-than-most-heres-why</link>
                                                                            <description>
                            <![CDATA[ If you're running LLMs locally on your PC using Ollama there's one key hardware spec you need to take into consideration. If not, your performance will tank. ]]>
                                                                                                            </description>
                                                                                                                                <guid isPermaLink="false">LeoZsv8UQjCsoHixwf8vk4</guid>
                                                                                                <enclosure url="https://cdn.mos.cms.futurecdn.net/mKyLUJhWQjSjzLgyvpUdYh-1280-80.jpg" type="image/jpeg" length="0"></enclosure>
                                                                        <pubDate>Mon, 25 Aug 2025 14:04:17 +0000</pubDate>                                                                                                                                <dcterms:modified>2025-08-29T15:37:25Z</dcterms:modified>
                                                                                                                                            <category><![CDATA[Artificial Intelligence]]></category>
                                                                                                <author><![CDATA[ richard.devine@futurenet.com (Richard Devine) ]]></author>                    <dc:creator><![CDATA[ Richard Devine ]]></dc:creator>                                                                                                    <media:content type="image/jpeg" url="https://cdn.mos.cms.futurecdn.net/mKyLUJhWQjSjzLgyvpUdYh-1280-80.jpg">
                                                            <media:credit><![CDATA[PC Gamer]]></media:credit>
                                                                                                                                                                        <media:description><![CDATA[Both of these are pretty good for gaming, but one is better for AI because of its VRAM. ]]></media:description>                                                            <media:text><![CDATA[MSI RTX 5070 Ti]]></media:text>
                                <media:title type="plain"><![CDATA[MSI RTX 5070 Ti]]></media:title>
                                                    </media:content>
                                                    <media:thumbnail url="https://cdn.mos.cms.futurecdn.net/mKyLUJhWQjSjzLgyvpUdYh-1280-80.jpg" />
                                                                                        </item>
                                <item>
                                                            <title><![CDATA[ The Geekom A9 Mega will be Windows 11's answer to Apple's Mac Studio — only better and more affordable ]]></title>
                                                                                                                                                                                                <link>https://www.windowscentral.com/hardware/the-geekom-a9-mega-will-be-windows-11s-answer-to-apples-mac-studio-only-better-and-more-affordable</link>
                                                                            <description>
                            <![CDATA[ Geekom's next mini PC, the A9 Mega, is arguably the first true Windows 11-powered competitor to the Mac Studio. Partly because it looks like it, and partly the earth-shattering performance inside. ]]>
                                                                                                            </description>
                                                                                                                                <guid isPermaLink="false">yLp4rzhV673oKTmuzZ7uuH</guid>
                                                                                                <enclosure url="https://cdn.mos.cms.futurecdn.net/XFDfCAij86hsaVbQRLXidB-1280-80.jpg" type="image/jpeg" length="0"></enclosure>
                                                                        <pubDate>Sat, 23 Aug 2025 20:56:44 +0000</pubDate>                                                                                                                                <dcterms:modified>2025-08-29T15:46:02Z</dcterms:modified>
                                                                                                                                            <category><![CDATA[Hardware]]></category>
                                                                                                <author><![CDATA[ richard.devine@futurenet.com (Richard Devine) ]]></author>                    <dc:creator><![CDATA[ Richard Devine ]]></dc:creator>                                                                                                    <media:content type="image/jpeg" url="https://cdn.mos.cms.futurecdn.net/XFDfCAij86hsaVbQRLXidB-1280-80.jpg">
                                                            <media:credit><![CDATA[Geekom]]></media:credit>
                                                                                                                                                                        <media:description><![CDATA[The Geekom A9 Mega has that Apple&#039;esque appeal to it and some absolutely ludicrous hardware inside.]]></media:description>                                                            <media:text><![CDATA[A lifestyle shot of the Geekom A9 Mega on a desk playing a video game. ]]></media:text>
                                <media:title type="plain"><![CDATA[A lifestyle shot of the Geekom A9 Mega on a desk playing a video game. ]]></media:title>
                                                    </media:content>
                                                    <media:thumbnail url="https://cdn.mos.cms.futurecdn.net/XFDfCAij86hsaVbQRLXidB-1280-80.jpg" />
                                                                                        </item>
                                <item>
                                                            <title><![CDATA[ NVIDIA expands its AI gaming tool to more GPUs — should you trust it in-game? ]]></title>
                                                                                                                                                                                                <link>https://www.windowscentral.com/hardware/nvidia/nvidia-expands-its-ai-gaming-tool-to-more-gpus-should-you-trust-it-in-game</link>
                                                                            <description>
                            <![CDATA[ NVIDIA's Project G-Assist, an AI gaming assistant, is set to receive its first major update to improve performance and to get it running on a wider range of hardware. ]]>
                                                                                                            </description>
                                                                                                                                <guid isPermaLink="false">xJfNBKxKtjVVGCe3ZLKxVZ</guid>
                                                                                                <enclosure url="https://cdn.mos.cms.futurecdn.net/9SFWfmZJPo9uKW8yxzZkte-1280-80.jpg" type="image/jpeg" length="0"></enclosure>
                                                                        <pubDate>Sat, 23 Aug 2025 13:37:00 +0000</pubDate>                                                                                                                                <dcterms:modified>2025-08-29T15:42:57Z</dcterms:modified>
                                                                                                                                            <category><![CDATA[Nvidia]]></category>
                                                    <category><![CDATA[Hardware]]></category>
                                                    <category><![CDATA[Desktops]]></category>
                                                                                                <author><![CDATA[ c.cale.hunt@gmail.com (Cale Hunt) ]]></author>                    <dc:creator><![CDATA[ Cale Hunt ]]></dc:creator>                                                                                                    <media:content type="image/jpeg" url="https://cdn.mos.cms.futurecdn.net/9SFWfmZJPo9uKW8yxzZkte-1280-80.jpg">
                                                            <media:credit><![CDATA[Harish Jonnalagadda / Windows Central]]></media:credit>
                                                                                                                                                                        <media:description><![CDATA[NVIDIA expands AI gaming assistant to more GPUs with August update.]]></media:description>                                                            <media:text><![CDATA[Colorful GeForce RTX 4080 NB EX-V review]]></media:text>
                                <media:title type="plain"><![CDATA[Colorful GeForce RTX 4080 NB EX-V review]]></media:title>
                                                    </media:content>
                                                    <media:thumbnail url="https://cdn.mos.cms.futurecdn.net/9SFWfmZJPo9uKW8yxzZkte-1280-80.jpg" />
                                                                                        </item>
                                <item>
                                                            <title><![CDATA[ I tried to replace my favorite Copilot feature with Ollama — but I failed (sort-of) ]]></title>
                                                                                                                                                                                                <link>https://www.windowscentral.com/artificial-intelligence/i-tried-replace-my-favorite-copilot-feature-with-local-ai</link>
                                                                            <description>
                            <![CDATA[ Using Copilot to summarize web articles is one of my favorite features. I tried to replicate it using an on-device AI model and it just isn't the same. ]]>
                                                                                                            </description>
                                                                                                                                <guid isPermaLink="false">zuzqJyLyffjfkPitZ2CoHT</guid>
                                                                                                <enclosure url="https://cdn.mos.cms.futurecdn.net/TCWExjtEghTEB5TC6X5MjL-1280-80.jpg" type="image/jpeg" length="0"></enclosure>
                                                                        <pubDate>Thu, 21 Aug 2025 11:32:00 +0000</pubDate>                                                                                                                                <dcterms:modified>2025-08-29T15:38:06Z</dcterms:modified>
                                                                                                                                            <category><![CDATA[Artificial Intelligence]]></category>
                                                                                                <author><![CDATA[ richard.devine@futurenet.com (Richard Devine) ]]></author>                    <dc:creator><![CDATA[ Richard Devine ]]></dc:creator>                                                                                                    <media:content type="image/jpeg" url="https://cdn.mos.cms.futurecdn.net/TCWExjtEghTEB5TC6X5MjL-1280-80.jpg">
                                                            <media:credit><![CDATA[Windows Central]]></media:credit>
                                                                                                                                                                        <media:description><![CDATA[Copilot is still the absolute king when it comes to summarizing web pages. ]]></media:description>                                                            <media:text><![CDATA[Asking Copilot to summarize a Windows Central article]]></media:text>
                                <media:title type="plain"><![CDATA[Asking Copilot to summarize a Windows Central article]]></media:title>
                                                    </media:content>
                                                    <media:thumbnail url="https://cdn.mos.cms.futurecdn.net/TCWExjtEghTEB5TC6X5MjL-1280-80.jpg" />
                                                                                        </item>
                                <item>
                                                            <title><![CDATA[ You NEED to ditch Ollama and use LM Studio for local AI if you have a laptop or mini PC — here's why  ]]></title>
                                                                                                                                                                                                <link>https://www.windowscentral.com/artificial-intelligence/ditch-ollama-and-use-lm-studio-for-local-ai-if-you-have-a-laptop-or-mini-pc</link>
                                                                            <description>
                            <![CDATA[ I've been playing with Ollama a lot recently, but it lacks in one key area that has sent me back over to trying LM Studio, with great success, and no need for dedicated GPU. ]]>
                                                                                                            </description>
                                                                                                                                <guid isPermaLink="false">bKQ8i4F4fGCgvyABGmwDxm</guid>
                                                                                                <enclosure url="https://cdn.mos.cms.futurecdn.net/GrA2nUj3phx3FUUjev32VQ-1280-80.jpg" type="image/jpeg" length="0"></enclosure>
                                                                        <pubDate>Tue, 19 Aug 2025 11:12:00 +0000</pubDate>                                                                                                                                <updated>Fri, 29 Aug 2025 15:38:48 +0000</updated>
                                                                                                                                            <category><![CDATA[Artificial Intelligence]]></category>
                                                                                                <author><![CDATA[ richard.devine@futurenet.com (Richard Devine) ]]></author>                    <dc:creator><![CDATA[ Richard Devine ]]></dc:creator>                                                                                                    <media:content type="image/jpeg" url="https://cdn.mos.cms.futurecdn.net/GrA2nUj3phx3FUUjev32VQ-1280-80.jpg">
                                                            <media:credit><![CDATA[Apoorva Bhardwaj / Windows Central]]></media:credit>
                                                                                                                                                                        <media:description><![CDATA[With LM Studio, even a mini PC like this can be an AI powerhouse. ]]></media:description>                                                            <media:text><![CDATA[Geekom A8 design showcase]]></media:text>
                                <media:title type="plain"><![CDATA[Geekom A8 design showcase]]></media:title>
                                                    </media:content>
                                                    <media:thumbnail url="https://cdn.mos.cms.futurecdn.net/GrA2nUj3phx3FUUjev32VQ-1280-80.jpg" />
                                                                                        </item>
                                <item>
                                                            <title><![CDATA[ We gave OpenAI’s open-source AI a kid’s test — here’s what happened ]]></title>
                                                                                                                                                                                                <link>https://www.windowscentral.com/artificial-intelligence/we-gave-openais-open-source-ai-a-kids-test-heres-what-happened</link>
                                                                            <description>
                            <![CDATA[ OpenAI has finally released some AI models folks can use at home on their local machines, so I decided to see if it was better at a test designed for children than my own kid. ]]>
                                                                                                            </description>
                                                                                                                                <guid isPermaLink="false">XgRPrbxvhxmAZAHhaZTm6e</guid>
                                                                                                <enclosure url="https://cdn.mos.cms.futurecdn.net/tSBJuxcyFEzg8K9S9Mx8CE-1280-80.jpg" type="image/jpeg" length="0"></enclosure>
                                                                        <pubDate>Sat, 16 Aug 2025 14:11:11 +0000</pubDate>                                                                                                                                <updated>Fri, 29 Aug 2025 15:39:14 +0000</updated>
                                                                                                                                            <category><![CDATA[Artificial Intelligence]]></category>
                                                                                                <author><![CDATA[ richard.devine@futurenet.com (Richard Devine) ]]></author>                    <dc:creator><![CDATA[ Richard Devine ]]></dc:creator>                                                                                                    <media:content type="image/jpeg" url="https://cdn.mos.cms.futurecdn.net/tSBJuxcyFEzg8K9S9Mx8CE-1280-80.jpg">
                                                            <media:credit><![CDATA[Getty Images]]></media:credit>
                                                                                                                                                                        <media:description><![CDATA[I asked gpt-oss:20b to answer a sample test of the standard my son is preparing to take. ]]></media:description>                                                            <media:text><![CDATA[Happy boy using system AI Chatbot in computer or mobile application. Chatbot conversation, Ai Artificial Intelligence technology. Open AI generate. Futuristic technology. Virtual assistant on internet.]]></media:text>
                                <media:title type="plain"><![CDATA[Happy boy using system AI Chatbot in computer or mobile application. Chatbot conversation, Ai Artificial Intelligence technology. Open AI generate. Futuristic technology. Virtual assistant on internet.]]></media:title>
                                                    </media:content>
                                                    <media:thumbnail url="https://cdn.mos.cms.futurecdn.net/tSBJuxcyFEzg8K9S9Mx8CE-1280-80.jpg" />
                                                                                        </item>
                                <item>
                                                            <title><![CDATA[ Using 'local AI' — how my 7-year-old laptop still punches above its weight ]]></title>
                                                                                                                                                                                                <link>https://www.windowscentral.com/artificial-intelligence/my-seven-year-old-mid-range-laptop-runs-local-ai</link>
                                                                            <description>
                            <![CDATA[ While most of my local AI work takes place on a fairly well-equipped desktop PC, I was curious to see what mileage I could get from an old laptop I have lying around, and it surprised me. ]]>
                                                                                                            </description>
                                                                                                                                <guid isPermaLink="false">NyjzYwbYPcCzy7wX94EB7m</guid>
                                                                                                <enclosure url="https://cdn.mos.cms.futurecdn.net/FwPDbQXAaBHP8G78YiByBR-1280-80.jpg" type="image/jpeg" length="0"></enclosure>
                                                                        <pubDate>Wed, 13 Aug 2025 11:09:00 +0000</pubDate>                                                                                                                                <updated>Fri, 29 Aug 2025 15:39:33 +0000</updated>
                                                                                                                                            <category><![CDATA[Artificial Intelligence]]></category>
                                                                                                <author><![CDATA[ richard.devine@futurenet.com (Richard Devine) ]]></author>                    <dc:creator><![CDATA[ Richard Devine ]]></dc:creator>                                                                                                    <media:content type="image/jpeg" url="https://cdn.mos.cms.futurecdn.net/FwPDbQXAaBHP8G78YiByBR-1280-80.jpg">
                                                            <media:credit><![CDATA[Windows Central]]></media:credit>
                                                                                                                                                                        <media:description><![CDATA[This laptop is seven years old, and it can still run a local LLM.]]></media:description>                                                            <media:text><![CDATA[A Huawei MateBook D running Ollama on Fedora 42]]></media:text>
                                <media:title type="plain"><![CDATA[A Huawei MateBook D running Ollama on Fedora 42]]></media:title>
                                                    </media:content>
                                                    <media:thumbnail url="https://cdn.mos.cms.futurecdn.net/FwPDbQXAaBHP8G78YiByBR-1280-80.jpg" />
                                                                                        </item>
                                <item>
                                                            <title><![CDATA[ This Common Mistake in Ollama Could Be Killing Your AI Performance in Windows 11 — Here’s How to Fix It ]]></title>
                                                                                                                                                                                                <link>https://www.windowscentral.com/artificial-intelligence/mistake-with-ollama-on-windows-sucked-away-performance</link>
                                                                            <description>
                            <![CDATA[ Ollama is one of the easiest ways to integrate localized AI LLMs into your daily workflow, but you might be leaving performance on the table by making one crucial mistake, like I was. ]]>
                                                                                                            </description>
                                                                                                                                <guid isPermaLink="false">hLDuuE8VLj79w7wc9yfPyD</guid>
                                                                                                <enclosure url="https://cdn.mos.cms.futurecdn.net/8y9RCsQ5YgD2wRzmwufo8B-1280-80.jpg" type="image/jpeg" length="0"></enclosure>
                                                                        <pubDate>Tue, 12 Aug 2025 14:15:00 +0000</pubDate>                                                                                                                                <updated>Fri, 29 Aug 2025 15:39:51 +0000</updated>
                                                                                                                                            <category><![CDATA[Artificial Intelligence]]></category>
                                                                                                <author><![CDATA[ richard.devine@futurenet.com (Richard Devine) ]]></author>                    <dc:creator><![CDATA[ Richard Devine ]]></dc:creator>                                                                                                    <media:content type="image/jpeg" url="https://cdn.mos.cms.futurecdn.net/8y9RCsQ5YgD2wRzmwufo8B-1280-80.jpg">
                                                            <media:credit><![CDATA[Windows Central]]></media:credit>
                                                                                                                                                                        <media:description><![CDATA[Being comfortable with Ollama in the terminal will help you easily identify how it's performing. ]]></media:description>                                                            <media:text><![CDATA[Ollama stats in Windows Terminal showing eval rate and the split of CPU and GPU being used. ]]></media:text>
                                <media:title type="plain"><![CDATA[Ollama stats in Windows Terminal showing eval rate and the split of CPU and GPU being used. ]]></media:title>
                                                    </media:content>
                                                    <media:thumbnail url="https://cdn.mos.cms.futurecdn.net/8y9RCsQ5YgD2wRzmwufo8B-1280-80.jpg" />
                                                                                        </item>
                                <item>
                                                            <title><![CDATA[ ChatGPT Isn’t the Only Game in Town—Here’s Why Local AI Might Be Better (and How to Use It on Your PC) ]]></title>
                                                                                                                                                                                                <link>https://www.windowscentral.com/artificial-intelligence/5-reasons-to-use-local-ai-tools-over-copilot-or-chatgpt-anyone-can-try-it-so-why-wouldnt-you</link>
                                                                            <description>
                            <![CDATA[ AI is far more than the likes of ChatGPT and Copilot, and while these online tools are accessible to all, there are plenty of reasons to start using localized tools on your PC instead. ]]>
                                                                                                            </description>
                                                                                                                                <guid isPermaLink="false">qPZDQTopXjsacXFuSRUFAb</guid>
                                                                                                <enclosure url="https://cdn.mos.cms.futurecdn.net/pu2BjA9FBQS3QdnPL99emD-1280-80.jpg" type="image/jpeg" length="0"></enclosure>
                                                                        <pubDate>Sat, 09 Aug 2025 20:15:00 +0000</pubDate>                                                                                                                                <updated>Fri, 29 Aug 2025 15:40:12 +0000</updated>
                                                                                                                                            <category><![CDATA[Artificial Intelligence]]></category>
                                                                                                <author><![CDATA[ richard.devine@futurenet.com (Richard Devine) ]]></author>                    <dc:creator><![CDATA[ Richard Devine ]]></dc:creator>                                                                                                    <media:content type="image/jpeg" url="https://cdn.mos.cms.futurecdn.net/pu2BjA9FBQS3QdnPL99emD-1280-80.jpg">
                                                            <media:credit><![CDATA[Windows Central]]></media:credit>
                                                                                                                                                                        <media:description><![CDATA[I've recently been using local LLMs to try and learn some PowerShell skills. ]]></media:description>                                                            <media:text><![CDATA[Ollama running the Gemma3:12b model on a Razer Blade 18. ]]></media:text>
                                <media:title type="plain"><![CDATA[Ollama running the Gemma3:12b model on a Razer Blade 18. ]]></media:title>
                                                    </media:content>
                                                    <media:thumbnail url="https://cdn.mos.cms.futurecdn.net/pu2BjA9FBQS3QdnPL99emD-1280-80.jpg" />
                                                                                        </item>
                                <item>
                                                            <title><![CDATA[ Ollama Launches User-Friendly GUI App for Effortless Local AI on Windows 11 — Here's What You Need to Know ]]></title>
                                                                                                                                                                                                <link>https://www.windowscentral.com/artificial-intelligence/ollamas-new-app-makes-using-local-ai-llms-on-your-windows-11-pc-a-breeze-no-more-need-to-chat-in-the-terminal</link>
                                                                            <description>
                            <![CDATA[ Ollama is already one of the best ways to run local LLMs on your PC, but with the release of a new GUI app for the tool, it's easier than ever. ]]>
                                                                                                            </description>
                                                                                                                                <guid isPermaLink="false">8QcWwSvSuMXoUPuVcUUCnF</guid>
                                                                                                <enclosure url="https://cdn.mos.cms.futurecdn.net/Rg6VvHbBpkmRdUot5Z5w27-1280-80.jpg" type="image/jpeg" length="0"></enclosure>
                                                                        <pubDate>Tue, 05 Aug 2025 12:34:42 +0000</pubDate>                                                                                                                                <updated>Fri, 29 Aug 2025 15:40:31 +0000</updated>
                                                                                                                                            <category><![CDATA[Artificial Intelligence]]></category>
                                                                                                <author><![CDATA[ richard.devine@futurenet.com (Richard Devine) ]]></author>                    <dc:creator><![CDATA[ Richard Devine ]]></dc:creator>                                                                                                    <media:content type="image/jpeg" url="https://cdn.mos.cms.futurecdn.net/Rg6VvHbBpkmRdUot5Z5w27-1280-80.jpg">
                                                            <media:credit><![CDATA[Windows Central]]></media:credit>
                                                                                                                                                                        <media:description><![CDATA[Ollama now has a GUI app for Windows 11.]]></media:description>                                                            <media:text><![CDATA[Ollama app running on Windows 11]]></media:text>
                                <media:title type="plain"><![CDATA[Ollama app running on Windows 11]]></media:title>
                                                    </media:content>
                                                    <media:thumbnail url="https://cdn.mos.cms.futurecdn.net/Rg6VvHbBpkmRdUot5Z5w27-1280-80.jpg" />
                                                                                        </item>
                                <item>
                                                            <title><![CDATA[ How to install and use Ollama to run AI LLMs locally on your Windows 11 PC ]]></title>
                                                                                                                                                                                                <link>https://www.windowscentral.com/software-apps/how-to-install-and-use-ollama-to-run-ai-llms-on-your-windows-11-pc</link>
                                                                            <description>
                            <![CDATA[ If you want to install and use an AI LLM locally on your PC, one of the easiest ways to do it is with Ollama. Here's how to get up and rolling. ]]>
                                                                                                            </description>
                                                                                                                                <guid isPermaLink="false">wgFoLdDaBE2HbDQyJQCU9c</guid>
                                                                                                <enclosure url="https://cdn.mos.cms.futurecdn.net/pu2BjA9FBQS3QdnPL99emD-1280-80.jpg" type="image/jpeg" length="0"></enclosure>
                                                                        <pubDate>Mon, 12 May 2025 16:48:37 +0000</pubDate>                                                                                                                                <updated>Fri, 29 Aug 2025 15:37:44 +0000</updated>
                                                                                                                                            <category><![CDATA[Artificial Intelligence]]></category>
                                                                                                <author><![CDATA[ richard.devine@futurenet.com (Richard Devine) ]]></author>                    <dc:creator><![CDATA[ Richard Devine ]]></dc:creator>                                                                                                    <media:content type="image/jpeg" url="https://cdn.mos.cms.futurecdn.net/pu2BjA9FBQS3QdnPL99emD-1280-80.jpg">
                                                            <media:credit><![CDATA[Windows Central]]></media:credit>
                                                                                                                                                                        <media:description><![CDATA[Ollama is an easy-to-use tool that allows you to interact with local LLMs on your Windows PC, as well as through WSL, a Mac, or a Linux machine. ]]></media:description>                                                            <media:text><![CDATA[Ollama running the Gemma3:12b model on a Razer Blade 18. ]]></media:text>
                                <media:title type="plain"><![CDATA[Ollama running the Gemma3:12b model on a Razer Blade 18. ]]></media:title>
                                                    </media:content>
                                                    <media:thumbnail url="https://cdn.mos.cms.futurecdn.net/pu2BjA9FBQS3QdnPL99emD-1280-80.jpg" />
                                                                                        </item>
            </channel>
</rss>