<?xml version="1.0" encoding="UTF-8"?>
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
<url>
<loc>https://openlit.io</loc>
<lastmod>2025-03-14</lastmod>
</url>
<url>
<loc>https://openlit.io/blogs</loc>
<lastmod>2025-03-14</lastmod>
</url>
<url>
<loc>https://openlit.io/about-us</loc>
<lastmod>2025-03-14</lastmod>
</url>
<url>
<loc>https://openlit.io/pricing</loc>
<lastmod>2025-03-14</lastmod>
</url>
<url>
<loc>https://openlit.io/compare</loc>
<lastmod>2025-03-14</lastmod>
</url>
<url>
<loc>https://openlit.io/privacy-policy</loc>
<lastmod>2025-03-09</lastmod>
</url>
<url>
<loc>https://openlit.io/terms</loc>
<lastmod>2025-03-09</lastmod>
</url>
<url>
<loc>https://openlit.io/compare/openlit-vs-langfuse</loc>
<lastmod>2025-03-14</lastmod>
</url>
<url>
<loc>https://openlit.io/compare/openlit-vs-helicone</loc>
<lastmod>2025-03-14</lastmod>
</url>
<url>
<loc>https://openlit.io/compare/openlit-vs-langsmith</loc>
<lastmod>2025-03-14</lastmod>
</url>
<url>
<loc>https://openlit.io/compare/openlit-vs-datadog</loc>
<lastmod>2025-03-14</lastmod>
</url>
<url>
<loc>https://openlit.io/blogs/gpu-monitoring-for-llm-inference-what-to-track-and-why-it-matters</loc>
<lastmod>2025-03-27T00:00:00.000Z</lastmod>
</url>
<url>
<loc>https://openlit.io/blogs/how-to-add-observability-to-your-llm-app-in-2-minutes-with-opentelemetry</loc>
<lastmod>2025-03-26T00:00:00.000Z</lastmod>
</url>
<url>
<loc>https://openlit.io/blogs/observability-with-openlit</loc>
<lastmod>2025-03-01T00:00:00.000Z</lastmod>
</url>
<url>
<loc>https://openlit.io/blogs/openlit-fleet-hub-at-scale</loc>
<lastmod>2025-03-10T00:00:00.000Z</lastmod>
</url>
<url>
<loc>https://openlit.io/blogs/openlit-openwebui</loc>
<lastmod>2025-03-01T00:00:00.000Z</lastmod>
</url>
<url>
<loc>https://openlit.io/blogs/pipeline-for-llm-apps</loc>
<lastmod>2025-03-01T00:00:00.000Z</lastmod>
</url>
<url>
<loc>https://openlit.io/blogs/protect-prompt-injection</loc>
<lastmod>2025-03-01T00:00:00.000Z</lastmod>
</url>
</urlset>
