<?xml version="1.0" encoding="UTF-8"?>
  <rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom">
    <channel>
      <title>黑环实验室</title>
      <link>https://blog.blackhalo.top/blog</link>
      <description>记录前沿科技、神秘事件与星辰大海的中文博客</description>
      <language>zh-CN</language>
      <!-- NOTE(review): RSS 2.0 expects "email (name)" in these fields; the email is missing — confirm and add it -->
      <managingEditor>(Black Halo Labs)</managingEditor>
      <webMaster>(Black Halo Labs)</webMaster>
      <lastBuildDate>Sat, 16 May 2026 00:00:00 GMT</lastBuildDate>
      <atom:link href="https://blog.blackhalo.top/tags/大语言模型/feed.xml" rel="self" type="application/rss+xml"/>
      
      <item>
        <guid>https://blog.blackhalo.top/blog/transformer-revolution</guid>
        <title>从 Attention 到 ChatGPT：Transformer 如何改写人工智能</title>
        <link>https://blog.blackhalo.top/blog/transformer-revolution</link>
        <description>回顾 Vaswani 等人 2017 年提出的 Transformer 架构，以及它如何成为当今大语言模型的基石。</description>
        <pubDate>Sat, 16 May 2026 00:00:00 GMT</pubDate>
        <author>(Black Halo Labs)</author>
        <category>人工智能</category>
        <category>大语言模型</category>
        <category>机器学习</category>
      </item>

    </channel>
  </rss>
