<?xml version="1.0" encoding="utf-8"?>
<export-example>
  <doc>
    <id>2078</id>
    <completedYear>2025</completedYear>
    <publishedYear/>
    <thesisYearAccepted/>
    <language>eng</language>
    <pageFirst/>
    <pageLast/>
    <pageNumber/>
    <edition/>
    <issue/>
    <volume/>
    <type>conferenceobject</type>
    <publisherName>TIB Open Publishing</publisherName>
    <publisherPlace>Hannover</publisherPlace>
    <creatingCorporation/>
    <contributingCorporation/>
    <belongsToBibliography>1</belongsToBibliography>
    <completedDate>--</completedDate>
    <publishedDate>--</publishedDate>
    <thesisDateAccepted>--</thesisDateAccepted>
    <title language="eng">A Novel Exponential Continuous Learning Rate Adaption Gradient Descent Optimization Method</title>
    <abstract language="eng">We present two novel, fast gradient-based optimizer algorithms with a dynamic learning rate. The main idea is to adapt the learning rate α by situational awareness, chiefly by striving for orthogonal neighboring gradients. The method has a high success rate, converges quickly, and relies far less on hand-tuned hyperparameters, giving it greater universality. It scales linearly (O(n)) with dimension and is rotation invariant, thereby overcoming known limitations. The method comes in two variants, C2Min and P2Min, with slightly different control strategies. Their strong performance is demonstrated by experiments on several benchmark datasets (ranging from MNIST to Tiny ImageNet) against the state-of-the-art optimizers Adam and Lion.</abstract>
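    <!-- A minimal, hypothetical sketch of the adaptation idea the abstract
         describes: a multiplicative (hence exponential, continuous) update of
         the learning rate, driven by the cosine between successive gradients
         so that the iterates steer toward orthogonal neighboring gradients.
         The function name adaptive_gd and the constant gamma are illustrative
         assumptions; the authors' exact C2Min/P2Min control rules appear only
         in the linked paper.

    import numpy as np

    def adaptive_gd(grad_fn, x, alpha=1e-3, gamma=0.1, steps=1000):
        """One possible rule: grow alpha when successive gradients align
        (cosine > 0), shrink it when they oppose (cosine < 0), and leave it
        unchanged at orthogonality (cosine = 0). It uses only inner products
        and norms, so each step is O(n) and rotation invariant, matching the
        abstract's scaling claims. gamma is an assumed adaptation speed."""
        g_prev = grad_fn(x)
        for _ in range(steps):
            x = x - alpha * g_prev          # plain gradient step
            g = grad_fn(x)
            cos = float(g @ g_prev) / (np.linalg.norm(g) * np.linalg.norm(g_prev) + 1e-12)
            alpha *= np.exp(gamma * cos)    # exponential, continuous adaptation
            g_prev = g
        return x

    # Example: minimizing f(x) = ||x||^2, whose gradient is 2x.
    # adaptive_gd(lambda x: 2 * x, np.array([3.0, -2.0]))
    -->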
    <parentTitle language="deu">Wildauer Konferenz für Künstliche Intelligenz 2025 (WiKKI25)</parentTitle>
    <identifier type="urn">urn:nbn:de:kobv:526-opus4-20785</identifier>
    <enrichment key="opus.import.date">2025-09-24T11:39:53+00:00</enrichment>
    <enrichment key="opus.source">sword</enrichment>
    <enrichment key="opus.import.user">sword</enrichment>
    <enrichment key="DOI_VoR">https://doi.org/10.52825/th-wildau-ensp.v2i.2939</enrichment>
    <enrichment key="SourceTitle">Kleinsorge, A., Fauck, A., &amp; Kupper, S. (2025). A Novel Exponential Continuous Learning Rate Adaption Gradient Descent Optimization Method. TH Wildau Engineering and Natural Sciences Proceedings , 2. https://doi.org/10.52825/th-wildau-ensp.v2i.2939</enrichment>
    <enrichment key="opus.doi.autoCreate">false</enrichment>
    <enrichment key="opus.urn.autoCreate">true</enrichment>
    <licence>Creative Commons - CC BY - Namensnennung 4.0 International</licence>
    <author>Alexander Kleinsorge</author>
    <author>Alexander Fauck</author>
    <author>Stefan Kupper</author>
    <series>
      <title>TH Wildau Engineering and Natural Sciences Proceedings</title>
      <number>2</number>
    </series>
    <subject>
      <language>eng</language>
      <type>uncontrolled</type>
      <value>neural network</value>
    </subject>
    <subject>
      <language>eng</language>
      <type>uncontrolled</type>
      <value>optimizer</value>
    </subject>
    <subject>
      <language>eng</language>
      <type>uncontrolled</type>
      <value>training</value>
    </subject>
    <collection role="ddc" number="006">Spezielle Computerverfahren</collection>
    <collection role="ddc" number="519">Wahrscheinlichkeiten, angewandte Mathematik</collection>
    <collection role="institutes" number="">Fachbereich Ingenieur- und Naturwissenschaften</collection>
    <collection role="open_access" number="">open_access</collection>
    <collection role="Import" number="import">Import</collection>
    <collection role="Funding" number="4">Publikationsfonds für Open-Access-Monografien des Landes Brandenburg</collection>
    <thesisPublisher>Technische Hochschule Wildau</thesisPublisher>
    <file>https://opus4.kobv.de/opus4-th-wildau/files/2078/2939_Kleinsorge_et_al.pdf</file>
  </doc>
</export-example>
