<?xml version="1.0" encoding="UTF-8" ?>
<rss
    version="2.0"
    xmlns:atom="http://www.w3.org/2005/Atom"
    xmlns:content="http://purl.org/rss/1.0/modules/content/"
    xmlns:webfeeds="http://webfeeds.org/rss/1.0"
    xmlns:media="http://search.yahoo.com/mrss/"
    >
    <channel>
        <title>Long-short-term-memory-networks Tag - Viblo</title>
        <link>https://viblo.asia/rss</link>
        <description><![CDATA[Free service for technical knowledge sharing]]></description>
        <atom:link href="https://viblo.asia/rss/tags/long-short-term-memory-networks.rss" rel="self"></atom:link>
                <copyright>Sun* Inc.</copyright>
                                                <webfeeds:logo>https://viblo.asia/logo_full.svg</webfeeds:logo>
        <image>
            <url>https://viblo.asia/logo_full.svg</url>
            <title>Long-short-term-memory-networks Tag - Viblo</title>
            <link>https://viblo.asia/rss</link>
        </image>
                                <language>vi-vn</language>
        <lastBuildDate>Sun, 05 Apr 2026 23:17:51 +0700</lastBuildDate>
                <item>
            <title><![CDATA[Tìm Hiểu LSTM: Bí Quyết Giữ Thông Tin Lâu Dài Hiệu Quả  ]]></title>
                        <link>https://viblo.asia/p/tim-hieu-lstm-bi-quyet-giu-thong-tin-lau-dai-hieu-qua-MG24BaezVz3</link>
            <guid isPermaLink="true">https://viblo.asia/p/tim-hieu-lstm-bi-quyet-giu-thong-tin-lau-dai-hieu-qua-MG24BaezVz3</guid>
            <description><![CDATA[Trong bài trước Recurrent Neural Networks (RNNs), Clearly Explained!!!, chúng ta đã khám phá RNN mạng nơ-ron truy hồi xử lý dữ liệu chuỗi. Tuy nhiên, ...]]></description>
                        <dc:creator xmlns:dc="http://purl.org/dc/elements/1.1/">Long Nguyễn Thành</dc:creator>
            <pubDate>Tue, 11 Feb 2025 10:27:07 +0700</pubDate>
                                                                                                        </item>
                <item>
            <title><![CDATA[Tản mạn về Self Attention]]></title>
                        <link>https://viblo.asia/p/tan-man-ve-self-attention-07LKXoq85V4</link>
            <guid isPermaLink="true">https://viblo.asia/p/tan-man-ve-self-attention-07LKXoq85V4</guid>
            <description><![CDATA[Self attention hay intra-attention - cụm từ chắc hẳn đã được đồng đạo trong giới Machine Learning biết đến nhiều qua một bài báo rất nổi tiếng Attenti...]]></description>
                        <dc:creator xmlns:dc="http://purl.org/dc/elements/1.1/">Bui Quang Manh</dc:creator>
            <pubDate>Mon, 29 Mar 2021 10:25:19 +0700</pubDate>
                                                                                                        </item>
            </channel>
</rss>
