
Commit 7efa75a: neurips24
1 parent 902496e

File tree: 15 files changed (+190 -81 lines)

_bibliography/references.bib (+21)

@@ -1,5 +1,26 @@
 ---
 ---
+
+@inproceedings{
+jamadandi2024spectral,
+title={Spectral Graph Pruning Against Over-Squashing and Over-Smoothing},
+author={Adarsh Jamadandi and Celia Rubio-Madrigal and Rebekka Burkholz},
+booktitle={Thirty-eighth Conference on Neural Information Processing Systems},
+year={2024},
+url={https://openreview.net/forum?id=EMkrwJY2de},
+pdf={https://openreview.net/pdf?id=EMkrwJY2de},
+abstract={Message Passing Graph Neural Networks are known to suffer from two problems that are sometimes believed to be diametrically opposed: over-squashing and over-smoothing. The former results from topological bottlenecks that hamper the information flow from distant nodes and are mitigated by spectral gap maximization, primarily, by means of edge additions. However, such additions often promote over-smoothing that renders nodes of different classes less distinguishable. Inspired by the Braess phenomenon, we argue that deleting edges can address over-squashing and over-smoothing simultaneously. This insight explains how edge deletions can improve generalization, thus connecting spectral gap optimization to a seemingly disconnected objective of reducing computational resources by pruning graphs for lottery tickets. To this end, we propose a computationally effective spectral gap optimization framework to add or delete edges and demonstrate its effectiveness on the long range graph benchmark and on larger heterophilous datasets.},
+}
+
+@inproceedings{
+mustafa2024training,
+title={Training GNNs in Balance by Dynamic Rescaling},
+author={Nimrah Mustafa and Rebekka Burkholz},
+booktitle={Thirty-eighth Conference on Neural Information Processing Systems},
+year={2024},
+abstract={Graph neural networks exhibiting a rescale invariance, like GATs, obey a conservation law of their parameters, which has been exploited to derive a balanced state that induces good initial trainability. Yet, finite learning rates as used in practice topple the network out of balance during training. This effect is even more pronounced with larger learning rates that tend to induce improved generalization but make the training dynamics less robust. To support even larger learning rates, we propose to dynamically balance the network according to a different criterion, based on relative gradients, that promotes faster and better training. In combination with large learning rates and gradient clipping, dynamic rebalancing significantly improves generalization on real-world data. We observe that rescaling provides us with the flexibility to control the order in which network layers are trained. This leads to novel insights into similar phenomena as grokking, which can further boost generalization performance.},
+}
+
 @inproceedings{
 mustafa2024gate,
 title={{GATE}: How to Keep Out Intrusive Neighbors},
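As context for the jamadandi2024spectral entry: the abstract's core operation, raising the spectral gap by deleting edges, is easy to illustrate. Below is a minimal brute-force sketch of Braess-style pruning, assuming networkx and numpy; the function names are our own, and the paper's actual framework is a computationally efficient optimization, not this one-eigendecomposition-per-candidate loop.

```python
import networkx as nx
import numpy as np

def spectral_gap(G):
    # Second-smallest eigenvalue of the normalized Laplacian;
    # the smallest is 0 for a connected graph.
    L = nx.normalized_laplacian_matrix(G).toarray()
    return np.sort(np.linalg.eigvalsh(L))[1]

def prune_one_edge(G):
    """Delete the edge whose removal most increases the spectral gap,
    if any single deletion increases it at all (the Braess phenomenon)."""
    base = spectral_gap(G)
    best_edge, best_gap = None, base
    for u, v in list(G.edges()):
        H = G.copy()
        H.remove_edge(u, v)
        if nx.is_connected(H):          # never disconnect the graph
            gap = spectral_gap(H)
            if gap > best_gap:
                best_edge, best_gap = (u, v), gap
    if best_edge is not None:
        G.remove_edge(*best_edge)
    return best_edge, best_gap - base

G = nx.barbell_graph(5, 2)              # two cliques joined by a path: a bottleneck
print(prune_one_edge(G))                # chosen edge (or None) and the gap change
```

When such a deletion exists, it improves connectivity in the spectral sense while also sparsifying the graph, which is the link the abstract draws between spectral gap optimization and pruning for lottery tickets.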

_data/alumni_members.yml (+15)

@@ -0,0 +1,15 @@
+- role: Research assistants (HiWis)
+  members:
+    - name: Harsha Nelaturu
+      last_name: Nelaturu
+      photo: harsha.nelaturu.jpg
+      start_date: Aug 2023
+      end_date: Jul 2024
+      url: https://nelaturuharsha.github.io/
+
+- role: Visiting students
+  members:
+    - name: Otto Piramuthu
+      last_name: Piramuthu
+      start_date: June 2024
+      end_date: August 2024

_data/news.yml (+3)

@@ -1,3 +1,6 @@
+- date: 25. September 2024
+  headline: "Our two papers were accepted at NeurIPS 2024 (see [publications](/publications))!"
+
 - date: 1. June 2024
   headline: "Welcome to Chao, Rahul, and Dong!"

_data/team_members.yml (+1 -16)

@@ -21,47 +21,41 @@
 - name: Advait Gadhikar
   last_name: Gadhikar
   photo: advait.gadhikar.jpg
-  role: PhD student
   start_date: Apr 2022
 
   url: https://advaitgadhikar.github.io/
 
 - name: Nimrah Mustafa
   last_name: Mustafa
   photo: nimrah.mustafa.jpg
-  role: PhD student
   start_date: Jun 2022
 
   url: https://cispa.de/en/people/c01nimu
 
 - name: Celia Rubio-Madrigal
   last_name: Rubio-Madrigal
   photo: celia.rubio-madrigal.jpg
-  role: PhD student
   start_date: Oct 2023
 
   url: https://celrm.github.io/
 
 - name: Tom Jacobs
   last_name: Jacobs
   photo: tom.jacobs.jpg
-  role: PhD student
   start_date: Feb 2024
 
   url: https://cispa.de/en/people/c01toja
 
 - name: Rahul Nittala
   last_name: Nittala
   photo:
-  role: PhD student
   start_date: Jun 2024
 
   url: https://cispa.de/en/people/c01rani
 
 - name: Dong Sun
   last_name: Sun
   photo: dong.sun.jpg
-  role: PhD student
   start_date: Jul 2024
 
   url: https://cispa.de/en/people/c01dosu
@@ -71,15 +65,6 @@
 - name: Adarsh Jamadandi
   last_name: Jamadandi
   photo: adarsh.jamadandi.jpg
-  role: Research assistant
   start_date: Nov 2022
 
-  url: https://adarshmj.github.io
-
-- name: Harsha Nelaturu
-  last_name: Nelaturu
-  photo: harsha.nelaturu.jpg
-  role: Research assistant
-  start_date: Aug 2023
-
-  url: https://nelaturuharsha.github.io/
+  url: https://adarshmj.github.io

_layouts/default.html (+1 -1)

@@ -8,7 +8,7 @@
 
   {% include header.html %}
 
-  <div class="container-fluid">
+  <div class="container-fluid" style="width: 100%;">
     <div class="row">
       {{ content }}
     </div>

_pages/team.md (+15 -45)

@@ -56,66 +56,36 @@ permalink: /team/
 {% endfor %}
 {% endfor %}
 
-{% if site.data.alumni_members.size > 0 %}
+
+
 
 ## Alumni
 
 {% assign number_printed = 0 %}
-{% for member in site.data.alumni_members %}
-
+{% for group in site.data.alumni_members %}
 {% assign even_odd = number_printed | modulo: 2 %}
-
 {% if even_odd == 0 %}
 <div class="row">
 {% endif %}
 
-<div class="col-sm-6 clearfix">
-<img src="{{ site.url }}{{ site.baseurl }}/images/team/{{ member.photo }}" class="img-responsive" width="25%" style="float: left" />
-<h4>{{ member.name }}</h4>
-<i>{{ member.duration }} <br> Role: {{ member.info }}</i>
-<ul style="overflow: hidden">
-
-</ul>
-</div>
-
-{% assign number_printed = number_printed | plus: 1 %}
-
-{% if even_odd == 1 %}
-</div>
-{% endif %}
 
+<div class="col-sm-6 clearfix">
+<h3>{{group.role}}</h3>
+<ul>
+{% for member in group.members %}
+<li style="margin-left:-16px;"
+>{% if member.url %}<a href="{{ member.url }}">{{ member.name }}</a>{% else %}{{ member.name }}{% endif %}:
+<i>{{ member.start_date }} - {{ member.end_date }}</i>.</li>
 {% endfor %}
-
-{% assign even_odd = number_printed | modulo: 2 %}
-{% if even_odd == 1 %}
+</ul>
 </div>
-{% endif %}
-{% endif %}
-
-{% if site.data.alumni_visitors > 0 %}
-## Former visitors, BSc/ MSc students
-<div class="row">
 
-<div class="col-sm-4 clearfix">
-<h4>Visitors</h4>
-{% for member in site.data.alumni_visitors %}
-{{ member.name }}
-{% endfor %}
+{% if even_odd == 1 or forloop.last %}
 </div>
+{% endif %}
 
-<div class="col-sm-4 clearfix">
-<h4>Master students</h4>
-{% for member in site.data.alumni_msc %}
-{{ member.name }}
-{% endfor %}
-</div>
+{% assign number_printed = number_printed | plus: 1 %}
 
-<div class="col-sm-4 clearfix">
-<h4>Bachelor Students</h4>
-{% for member in site.data.alumni_bsc %}
-{{ member.name }}
 {% endfor %}
-</div>
 
-</div>
-{% endif %}
+<br>

_site/404.html (+1 -1)

@@ -64,7 +64,7 @@
 </div>
 
 
-<div class="container-fluid">
+<div class="container-fluid" style="width: 100%;">
 <div class="row">
 <p>Sorry, but the page you were trying to view does not exist.</p>

File renamed without changes.

_site/index.html (+6 -6)

@@ -64,7 +64,7 @@
 </div>
 
 
-<div class="container-fluid">
+<div class="container-fluid" style="width: 100%;">
 <div class="row">
 <div id="homeid" class="col-sm-8">
 
@@ -120,6 +120,11 @@ <h1 id="relational-ml-lab">Relational ML Lab</h1>
 <h3>News</h3>
 <div class="well">
 
+<b>25 Sep 2024</b>
+<p>Our two papers were accepted at NeurIPS 2024 (see <a href="/publications">publications</a>)!</p>
+
+<hr/>
+
 <b>01 Jun 2024</b>
 <p>Welcome to Chao, Rahul, and Dong!</p>
 
@@ -140,11 +145,6 @@ <h3>News</h3>
 
 <hr/>
 
-<b>01 Oct 2023</b>
-<p>Welcome Celia!</p>
-
-<hr/>
-
 
 <a href="/news.html">... see all news</a>
 </div>

_site/news.html (+6 -1)

@@ -64,13 +64,18 @@
 </div>
 
 
-<div class="container-fluid">
+<div class="container-fluid" style="width: 100%;">
 <div class="row">
 <div id="textid" class="col-sm-12">
 <h1 id="news">News</h1>
 
 <hr />
 
+<h4>25 September 2024</h4>
+<p>Our two papers were accepted at NeurIPS 2024 (see <a href="/publications">publications</a>)!</p>
+
+<hr />
+
 <h4>01 June 2024</h4>
 <p>Welcome to Chao, Rahul, and Dong!</p>

_site/openings.html (+1 -1)

@@ -64,7 +64,7 @@
 </div>
 
 
-<div class="container-fluid">
+<div class="container-fluid" style="width: 100%;">
 <div class="row">
 <div id="textid" class="col-sm-12">
 <h1 id="open-positions">Open positions</h1>

_site/publications/index.html (+97 -2)

@@ -64,7 +64,7 @@
 </div>
 
 
-<div class="container-fluid">
+<div class="container-fluid" style="width: 100%;">
 <div class="row">
 <div id="publications" class="col-sm-12">
 <h1 id="publications">Publications</h1>
@@ -80,12 +80,107 @@ <h1 id="publications">Publications</h1>
 <option value="Nittala">Rahul Nittala</option>
 <option value="Sun">Dong Sun</option>
 <option value="Jamadandi">Adarsh Jamadandi</option>
-<option value="Nelaturu">Harsha Nelaturu</option>
 </select></p>
 
 <ol class="bibliography"><li><!-- _layouts/bib.html -->
 
 
+<div class="row" style="margin-top: 30px;" id="bib-jamadandi2024spectral" authors="JamadandiRubio-MadrigalBurkholz">
+<div class="preview"><img class="preview z-depth-1 rounded" src="/images/publications/default.png" /></div>
+<!-- Entry bib key -->
+<div id="jamadandi2024spectral" class="col-sm-10">
+<!-- Title -->
+
+<div class="title"><a href="https://openreview.net/forum?id=EMkrwJY2de"><b>Spectral Graph Pruning Against Over-Squashing and Over-Smoothing</b></a></div>
+
+<!-- Author -->
+<div class="author">
+
+
+Adarsh Jamadandi,&nbsp;Celia Rubio-Madrigal,&nbsp;and&nbsp;Rebekka Burkholz</div>
+
+<!-- Journal/Book title and date -->
+
+
+<div class="periodical">
+<em>In Thirty-eighth Conference on Neural Information Processing Systems</em>, 2024
+</div>
+<div class="periodical">
+
+</div>
+
+<!-- Links/Buttons -->
+<div class="links"><a class="conf btn btn-sm z-depth-0">NeurIPS</a><a class="bibtex btn btn-sm z-depth-0" role="button">Bib</a>
+<a class="abstract btn btn-sm z-depth-0" role="button">Abs</a>
+<a href="https://openreview.net/pdf?id=EMkrwJY2de" class="btn btn-sm z-depth-0" role="button">PDF</a>
+</div>
+
+
+<!-- Hidden abstract block -->
+<div class="abstract hidden">
+<p>Message Passing Graph Neural Networks are known to suffer from two problems that are sometimes believed to be diametrically opposed: over-squashing and over-smoothing. The former results from topological bottlenecks that hamper the information flow from distant nodes and are mitigated by spectral gap maximization, primarily, by means of edge additions. However, such additions often promote over-smoothing that renders nodes of different classes less distinguishable. Inspired by the Braess phenomenon, we argue that deleting edges can address over-squashing and over-smoothing simultaneously. This insight explains how edge deletions can improve generalization, thus connecting spectral gap optimization to a seemingly disconnected objective of reducing computational resources by pruning graphs for lottery tickets. To this end, we propose a computationally effective spectral gap optimization framework to add or delete edges and demonstrate its effectiveness on the long range graph benchmark and on larger heterophilous datasets.</p>
+</div><!-- Hidden bibtex block -->
+<div class="bibtex hidden">
+<figure class="highlight"><pre><code class="language-bibtex" data-lang="bibtex"><span class="nc">@inproceedings</span><span class="p">{</span><span class="nl">jamadandi2024spectral</span><span class="p">,</span>
+<span class="na">title</span> <span class="p">=</span> <span class="s">{Spectral Graph Pruning Against Over-Squashing and Over-Smoothing}</span><span class="p">,</span>
+<span class="na">author</span> <span class="p">=</span> <span class="s">{Jamadandi, Adarsh and Rubio-Madrigal, Celia and Burkholz, Rebekka}</span><span class="p">,</span>
+<span class="na">booktitle</span> <span class="p">=</span> <span class="s">{Thirty-eighth Conference on Neural Information Processing Systems}</span><span class="p">,</span>
+<span class="na">year</span> <span class="p">=</span> <span class="s">{2024}</span><span class="p">,</span>
+<span class="na">url</span> <span class="p">=</span> <span class="s">{https://openreview.net/forum?id=EMkrwJY2de}</span><span class="p">,</span>
+<span class="p">}</span></code></pre></figure>
+</div>
+</div>
+</div></li>
+<li><!-- _layouts/bib.html -->
+
+
+<div class="row" style="margin-top: 30px;" id="bib-mustafa2024training" authors="MustafaBurkholz">
+<div class="preview"><img class="preview z-depth-1 rounded" src="/images/publications/default.png" /></div>
+<!-- Entry bib key -->
+<div id="mustafa2024training" class="col-sm-10">
+<!-- Title -->
+
+<div class="title"><b>Training GNNs in Balance by Dynamic Rescaling</b></div>
+
+<!-- Author -->
+<div class="author">
+
+
+Nimrah Mustafa,&nbsp;and&nbsp;Rebekka Burkholz</div>
+
+<!-- Journal/Book title and date -->
+
+
+<div class="periodical">
+<em>In Thirty-eighth Conference on Neural Information Processing Systems</em>, 2024
+</div>
+<div class="periodical">
+
+</div>
+
+<!-- Links/Buttons -->
+<div class="links"><a class="conf btn btn-sm z-depth-0">NeurIPS</a><a class="bibtex btn btn-sm z-depth-0" role="button">Bib</a>
+<a class="abstract btn btn-sm z-depth-0" role="button">Abs</a>
+</div>
+
+
+<!-- Hidden abstract block -->
+<div class="abstract hidden">
+<p>Graph neural networks exhibiting a rescale invariance, like GATs, obey a conservation law of their parameters, which has been exploited to derive a balanced state that induces good initial trainability. Yet, finite learning rates as used in practice topple the network out of balance during training. This effect is even more pronounced with larger learning rates that tend to induce improved generalization but make the training dynamics less robust. To support even larger learning rates, we propose to dynamically balance the network according to a different criterion, based on relative gradients, that promotes faster and better training. In combination with large learning rates and gradient clipping, dynamic rebalancing significantly improves generalization on real-world data. We observe that rescaling provides us with the flexibility to control the order in which network layers are trained. This leads to novel insights into similar phenomena as grokking, which can further boost generalization performance.</p>
+</div><!-- Hidden bibtex block -->
+<div class="bibtex hidden">
+<figure class="highlight"><pre><code class="language-bibtex" data-lang="bibtex"><span class="nc">@inproceedings</span><span class="p">{</span><span class="nl">mustafa2024training</span><span class="p">,</span>
+<span class="na">title</span> <span class="p">=</span> <span class="s">{Training GNNs in Balance by Dynamic Rescaling}</span><span class="p">,</span>
+<span class="na">author</span> <span class="p">=</span> <span class="s">{Mustafa, Nimrah and Burkholz, Rebekka}</span><span class="p">,</span>
+<span class="na">booktitle</span> <span class="p">=</span> <span class="s">{Thirty-eighth Conference on Neural Information Processing Systems}</span><span class="p">,</span>
+<span class="na">year</span> <span class="p">=</span> <span class="s">{2024}</span>
+<span class="p">}</span></code></pre></figure>
+</div>
+</div>
+</div></li>
+<li><!-- _layouts/bib.html -->
+
+
 <div class="row" style="margin-top: 30px;" id="bib-mustafa2024gate" authors="MustafaBurkholz">
 <div class="preview"><img class="preview z-depth-1 rounded" src="/images/publications/default.png" /></div>
 <!-- Entry bib key -->
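As a companion note to the mustafa2024training entry above: its rebalancing criterion, equalizing relative gradients across rescale-invariant layers, can be sketched on a toy example. The snippet below uses a two-layer bias-free ReLU network rather than a GAT (our simplification, assuming PyTorch); rescaling (W1, W2) to (a*W1, W2/a) leaves the network function unchanged, and for this architecture the relative gradients transform as r1/a^2 and r2*a^2, so a = (r1/r2)^(1/4) equalizes them.

```python
import torch

torch.manual_seed(0)
W1 = torch.randn(16, 8, requires_grad=True)    # first layer of a toy ReLU net
W2 = torch.randn(1, 16, requires_grad=True)    # second layer
x, y = torch.randn(32, 8), torch.randn(32, 1)

def mse():
    return ((torch.relu(x @ W1.T) @ W2.T - y) ** 2).mean()

mse().backward()
r1 = (W1.grad.norm() / W1.norm()).item()       # relative gradient, layer 1
r2 = (W2.grad.norm() / W2.norm()).item()       # relative gradient, layer 2

with torch.no_grad():
    out_before = torch.relu(x @ W1.T) @ W2.T
    a = (r1 / r2) ** 0.25                      # equalizes r1/a**2 and r2*a**2
    W1 *= a                                    # rescale: function is unchanged
    W2 /= a                                    # (ReLU is positively homogeneous)
    out_after = torch.relu(x @ W1.T) @ W2.T

print(torch.allclose(out_before, out_after, atol=1e-5))  # True: same function
```

Both layers then start from matched relative gradients, the kind of balanced state that, per the abstract, combines with large learning rates and gradient clipping to improve generalization.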
