<?xml version="1.0" encoding="UTF-8"?>
<!-- generator="FeedCreator 1.8" -->
<?xml-stylesheet href="https://wiki.eecs.yorku.ca/lab/MLL/lib/exe/css.php?s=feed" type="text/css"?>
<rdf:RDF
    xmlns="http://purl.org/rss/1.0/"
    xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
    xmlns:slash="http://purl.org/rss/1.0/modules/slash/"
    xmlns:dc="http://purl.org/dc/elements/1.1/">
    <channel rdf:about="https://wiki.eecs.yorku.ca/lab/MLL/feed.php">
        <title>Machine Learning Lab - projects:asgd</title>
        <description></description>
        <link>https://wiki.eecs.yorku.ca/lab/MLL/</link>
        <image rdf:resource="https://wiki.eecs.yorku.ca/lab/MLL/_media/wiki:dokuwiki-128.png" />
        <dc:date>2026-04-11T02:44:33+00:00</dc:date>
        <items>
            <rdf:Seq>
                <rdf:li rdf:resource="https://wiki.eecs.yorku.ca/lab/MLL/projects:asgd:start?rev=1436709897&amp;do=diff"/>
            </rdf:Seq>
        </items>
    </channel>
    <image rdf:about="https://wiki.eecs.yorku.ca/lab/MLL/_media/wiki:dokuwiki-128.png">
        <title>Machine Learning Lab</title>
        <link>https://wiki.eecs.yorku.ca/lab/MLL/</link>
        <url>https://wiki.eecs.yorku.ca/lab/MLL/_media/wiki:dokuwiki-128.png</url>
    </image>
    <item rdf:about="https://wiki.eecs.yorku.ca/lab/MLL/projects:asgd:start?rev=1436709897&amp;do=diff">
        <dc:format>text/html</dc:format>
        <dc:date>2015-07-12T14:04:57+00:00</dc:date>
        <dc:creator>Anonymous (anonymous@undisclosed.example.com)</dc:creator>
        <title>Annealing SGD</title>
        <link>https://wiki.eecs.yorku.ca/lab/MLL/projects:asgd:start?rev=1436709897&amp;do=diff</link>
        <description>Annealing SGD

Annealed Gradient Descent (AGD)






Here, we propose a novel annealed gradient descent (AGD) method for
deep learning. AGD optimizes a sequence of gradually improved smoother mosaic
functions that approximate the original non-convex objective function according
to an annealing schedule during the optimization process. We present a theoretical
analysis on its convergence properties and learning speed. The proposed AGD
algorithm is applied to learning deep neural networks (…</description>
    </item>
</rdf:RDF>
