<?xml version="1.0" encoding="UTF-8"?><rss version="2.0"
	xmlns:content="http://purl.org/rss/1.0/modules/content/"
	xmlns:wfw="http://wellformedweb.org/CommentAPI/"
	xmlns:dc="http://purl.org/dc/elements/1.1/"
	xmlns:atom="http://www.w3.org/2005/Atom"
	xmlns:sy="http://purl.org/rss/1.0/modules/syndication/"
	xmlns:slash="http://purl.org/rss/1.0/modules/slash/"
	>

<channel>
	<title>Ollama &#8211; Suverent</title>
	<atom:link href="https://suverent.org/tag/ollama/feed/" rel="self" type="application/rss+xml" />
	<link>https://suverent.org</link>
	<description>Life, stories and all in between ...</description>
	<lastBuildDate>Thu, 19 Feb 2026 11:30:13 +0000</lastBuildDate>
	<language>en-US</language>
	<sy:updatePeriod>
	hourly	</sy:updatePeriod>
	<sy:updateFrequency>
	1	</sy:updateFrequency>
	<generator>https://wordpress.org/?v=6.9.1</generator>
	<item>
		<title>Home Assistant integrated complete local Norwegian LLM with Ollama and Whisper</title>
		<link>https://suverent.org/2026/02/12/home-assistant-integrated-complete-local-norwegian-llm-with-ollama-and-whisper/</link>
					<comments>https://suverent.org/2026/02/12/home-assistant-integrated-complete-local-norwegian-llm-with-ollama-and-whisper/#respond</comments>
		
		<dc:creator><![CDATA[Philippe]]></dc:creator>
		<pubDate>Thu, 12 Feb 2026 12:16:25 +0000</pubDate>
				<category><![CDATA[Home Assistant]]></category>
		<category><![CDATA[LLM]]></category>
		<category><![CDATA[Tech stuff]]></category>
		<category><![CDATA[Local]]></category>
		<category><![CDATA[Ollama]]></category>
		<category><![CDATA[Piper]]></category>
		<category><![CDATA[Voice PE]]></category>
		<category><![CDATA[Whisper]]></category>
		<guid isPermaLink="false">https://suverent.org/?p=706</guid>

					<description><![CDATA[Update 19.02.26: Added Piper docker config running together with Whisper with GPU acceleration. See instructions below. Next step is to add voice clone (currently processing) Introduction I&#8217;m a big supporter of the open source community, data privacy and ownership. At the same time I have been curious about the possibilities of local LLM (Large Language Model) [&#8230;]]]></description>
		
					<wfw:commentRss>https://suverent.org/2026/02/12/home-assistant-integrated-complete-local-norwegian-llm-with-ollama-and-whisper/feed/</wfw:commentRss>
			<slash:comments>0</slash:comments>
		
		
			</item>
	</channel>
</rss>
