Skip to content

Commit 20cfe8e

Browse files
committed
paper url
1 parent 7efa75a commit 20cfe8e

File tree

2 files changed

+12
-8
lines changed

2 files changed

+12
-8
lines changed

_bibliography/references.bib

+4-2
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@ @inproceedings{
55
jamadandi2024spectral,
66
title={Spectral Graph Pruning Against Over-Squashing and Over-Smoothing},
77
author={Adarsh Jamadandi and Celia Rubio-Madrigal and Rebekka Burkholz},
8-
booktitle={Thirty-eigth Conference on Neural Information Processing Systems},
8+
booktitle={Thirty-eighth Conference on Neural Information Processing Systems},
99
year={2024},
1010
url={https://openreview.net/forum?id=EMkrwJY2de},
1111
pdf={https://openreview.net/pdf?id=EMkrwJY2de},
@@ -16,8 +16,10 @@ @inproceedings{
1616
mustafa2024training,
1717
title={Training GNNs in Balance by Dynamic Rescaling},
1818
author={Nimrah Mustafa and Rebekka Burkholz},
19-
booktitle={Thirty-eigth Conference on Neural Information Processing Systems},
19+
booktitle={Thirty-eighth Conference on Neural Information Processing Systems},
2020
year={2024},
21+
url={https://openreview.net/forum?id=IfZwSRpqHl},
22+
pdf={https://openreview.net/pdf?id=IfZwSRpqHl},
2123
abstract={Graph neural networks exhibiting a rescale invariance, like GATs, obey a conservation law of its parameters, which has been exploited to derive a balanced state that induces good initial trainability. Yet, finite learning rates as used in practice topple the network out of balance during training. This effect is even more pronounced with larger learning rates that tend to induce improved generalization but make the training dynamics less robust. To support even larger learning rates, we propose to dynamically balance the network according to a different criterion, based on relative gradients, that promotes faster and better. In combination with large learning rates and gradient clipping, dynamic rebalancing significantly improves generalization on real-world data. We observe that rescaling provides us with the flexibility to control the order in which network layers are trained. This leads to novel insights into similar phenomena as grokking, which can further boost generalization performance.},
2224
}
2325

_site/publications/index.html

+8-6
Original file line numberDiff line numberDiff line change
@@ -103,7 +103,7 @@ <h1 id="publications">Publications</h1>
103103

104104

105105
<div class="periodical">
106-
<em>In Thirty-eigth Conference on Neural Information Processing Systems</em>, 2024
106+
<em>In Thirty-eighth Conference on Neural Information Processing Systems</em>, 2024
107107
</div>
108108
<div class="periodical">
109109

@@ -124,7 +124,7 @@ <h1 id="publications">Publications</h1>
124124
<figure class="highlight"><pre><code class="language-bibtex" data-lang="bibtex"><span class="nc">@inproceedings</span><span class="p">{</span><span class="nl">jamadandi2024spectral</span><span class="p">,</span>
125125
<span class="na">title</span> <span class="p">=</span> <span class="s">{Spectral Graph Pruning Against Over-Squashing and Over-Smoothing}</span><span class="p">,</span>
126126
<span class="na">author</span> <span class="p">=</span> <span class="s">{Jamadandi, Adarsh and Rubio-Madrigal, Celia and Burkholz, Rebekka}</span><span class="p">,</span>
127-
<span class="na">booktitle</span> <span class="p">=</span> <span class="s">{Thirty-eigth Conference on Neural Information Processing Systems}</span><span class="p">,</span>
127+
<span class="na">booktitle</span> <span class="p">=</span> <span class="s">{Thirty-eighth Conference on Neural Information Processing Systems}</span><span class="p">,</span>
128128
<span class="na">year</span> <span class="p">=</span> <span class="s">{2024}</span><span class="p">,</span>
129129
<span class="na">url</span> <span class="p">=</span> <span class="s">{https://openreview.net/forum?id=EMkrwJY2de}</span><span class="p">,</span>
130130
<span class="p">}</span></code></pre></figure>
@@ -140,7 +140,7 @@ <h1 id="publications">Publications</h1>
140140
<div id="mustafa2024training" class="col-sm-10">
141141
<!-- Title -->
142142

143-
<div class="title"><b>Training GNNs in Balance by Dynamic Rescaling</b></div>
143+
<div class="title"><a href="https://openreview.net/forum?id=IfZwSRpqHl"><b>Training GNNs in Balance by Dynamic Rescaling</b></a></div>
144144

145145
<!-- Author -->
146146
<div class="author">
@@ -152,7 +152,7 @@ <h1 id="publications">Publications</h1>
152152

153153

154154
<div class="periodical">
155-
<em>In Thirty-eigth Conference on Neural Information Processing Systems</em>, 2024
155+
<em>In Thirty-eighth Conference on Neural Information Processing Systems</em>, 2024
156156
</div>
157157
<div class="periodical">
158158

@@ -161,6 +161,7 @@ <h1 id="publications">Publications</h1>
161161
<!-- Links/Buttons -->
162162
<div class="links"><a class="conf btn btn-sm z-depth-0">NeurIPS</a><a class="bibtex btn btn-sm z-depth-0" role="button">Bib</a>
163163
<a class="abstract btn btn-sm z-depth-0" role="button">Abs</a>
164+
<a href="https://openreview.net/pdf?id=IfZwSRpqHl" class="btn btn-sm z-depth-0" role="button">PDF</a>
164165
</div>
165166

166167

@@ -172,8 +173,9 @@ <h1 id="publications">Publications</h1>
172173
<figure class="highlight"><pre><code class="language-bibtex" data-lang="bibtex"><span class="nc">@inproceedings</span><span class="p">{</span><span class="nl">mustafa2024training</span><span class="p">,</span>
173174
<span class="na">title</span> <span class="p">=</span> <span class="s">{Training GNNs in Balance by Dynamic Rescaling}</span><span class="p">,</span>
174175
<span class="na">author</span> <span class="p">=</span> <span class="s">{Mustafa, Nimrah and Burkholz, Rebekka}</span><span class="p">,</span>
175-
<span class="na">booktitle</span> <span class="p">=</span> <span class="s">{Thirty-eigth Conference on Neural Information Processing Systems}</span><span class="p">,</span>
176-
<span class="na">year</span> <span class="p">=</span> <span class="s">{2024}</span>
176+
<span class="na">booktitle</span> <span class="p">=</span> <span class="s">{Thirty-eighth Conference on Neural Information Processing Systems}</span><span class="p">,</span>
177+
<span class="na">year</span> <span class="p">=</span> <span class="s">{2024}</span><span class="p">,</span>
178+
<span class="na">url</span> <span class="p">=</span> <span class="s">{https://openreview.net/forum?id=IfZwSRpqHl}</span><span class="p">,</span>
177179
<span class="p">}</span></code></pre></figure>
178180
</div>
179181
</div>

0 commit comments

Comments (0)