
Commit 3a8d3a0

tesuji authored and Centril committed Apr 25, 2019
Run cargo fmt on whole project (#272)
* Fix fallback in #270: wrong order of arguments in `format!`.
* fmt: Remove invalid struct_lit_width option
* ci: Reformat travis config
* ci: Check style before build
* fmt: Run cargo fmt on whole project
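Background on the `format!` item above: positional arguments are substituted in the order they are passed, so swapping two arguments still compiles but renders the wrong message. A minimal, hypothetical sketch of that failure mode (not the actual code from #270):

    fn main() {
        let repo = "rust-lang/rfcs";
        let issue = 2345;

        // Intended message: "issue 2345 in rust-lang/rfcs"
        let right = format!("issue {} in {}", issue, repo);

        // The same template with the arguments swapped compiles fine,
        // but renders "issue rust-lang/rfcs in 2345".
        let wrong = format!("issue {} in {}", repo, issue);

        assert_ne!(right, wrong);
        println!("{}", right);
        println!("{}", wrong);
    }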
1 parent 76c141b commit 3a8d3a0

19 files changed: +950 -617 lines
 

.travis.yml

+27 -24

@@ -3,7 +3,7 @@ notifications:
 email: false
 language: rust
 rust:
-- nightly
+- nightly
 cache:
 cargo: true
 directories:
@@ -15,31 +15,34 @@ addons:
 - curl
 - libpq-dev
 - pkg-config
+before_install:
+- rustup component add rustfmt
+- cargo fmt --all -- --check
 install:
-- export RUST_LOG=debug,hyper=info,rustc=error,cargo=error,jobserver=error
-- export GITHUB_WEBHOOK_SECRETS=none
-- export GITHUB_ACCESS_TOKEN=
-- export GITHUB_SCRAPE_INTERVAL=6000
-- export GITHUB_USER_AGENT=none-agent-with-left-beef
-- export POST_COMMENTS=false
-- export RUST_BACKTRACE=1
-- export PATH=$PATH:$HOME/.cargo/bin
-- export DATABASE_URL=postgres://localhost/dashboard
-- export DATABASE_POOL_SIZE=5
-- rustup default $(cat rust-toolchain)
-- cargo install cargo-update || true
-- cargo install-update-config diesel_cli --default-features false --feature postgres
-- cargo install-update -i diesel_cli
+- export RUST_LOG=debug,hyper=info,rustc=error,cargo=error,jobserver=error
+- export GITHUB_WEBHOOK_SECRETS=none
+- export GITHUB_ACCESS_TOKEN=
+- export GITHUB_SCRAPE_INTERVAL=6000
+- export GITHUB_USER_AGENT=none-agent-with-left-beef
+- export POST_COMMENTS=false
+- export RUST_BACKTRACE=1
+- export PATH=$PATH:$HOME/.cargo/bin
+- export DATABASE_URL=postgres://localhost/dashboard
+- export DATABASE_POOL_SIZE=5
+- rustup default $(cat rust-toolchain)
+- cargo install cargo-update || true
+- cargo install-update-config diesel_cli --default-features false --feature postgres
+- cargo install-update -i diesel_cli
 before_script:
-- diesel setup
-- diesel migration run
-- psql -q -d $DATABASE_URL < githubuser-backup.pg
+- diesel setup
+- diesel migration run
+- psql -q -d $DATABASE_URL < githubuser-backup.pg
 script:
-- cargo build
-- cargo test
+- cargo build
+- cargo test
 before_deploy:
-- DATABASE_URL=$(heroku config:get DATABASE_URL -a rfcbot-rs) diesel migration run
+- DATABASE_URL=$(heroku config:get DATABASE_URL -a rfcbot-rs) diesel migration run
 deploy:
-provider: heroku
-api_key:
-secure: "HgoF2n6pnzUhZ4oBNRo3gSoxj/o9x7QW0fKaj9sEYDNkTgak9IX3ha0antoYKB7dMw7IvB8oqVAHqldlGlG/kxUDKN8kaYA4O7CZgKP0auykTkmH61FP7MKV0hV0vyFfJLSraClVw/DL5tjYybs/rSBWFAvwttIB7QlcFv5sLIuwe5nHzftFfxba0H1+0oynO55FlmRzFBWydlFVNTsGVRddFHPgG3f2V9YYIjT18MKvn+BcV7WUq6QweB/3RjwTDYzws9mcWt6i3ju1D4nDTDVbtzeLs7Yt+NhnvrDeB+2FWSrV03LCjnnzv2zCUaROU522Vns0ZC/+2b9V3YbcCBJRA8QLBdo12JUCUzrdk8DOqIYKwlW6WbI3DcgRGz9Li7Dw2466+lsPidBfcdxnziTvc+r/2r6dKv3q7nwDpoZEyxnnTumPf50PJZs/lDu99Vo5C2NIdN2O5sqDHd8AJTLtWrY3FdN5MGKqTm0R9rVnObvUkTKbZJrDn39O5MU3mnHINUT+6ZCnNgra7rNn7Ai3tA1RKPB7kIZo41Td8j56iSLANXziCh0MikPfQv4x1scl3c1h5bR8ZqxVqH4IODjDJrJv43+hsbE9xGsmrC4OaqImLUpl/8zi6a4zZ8Na1AiFKVcdLZ9xt40/2yg7ysn53SCpQ5vty/j36clNwAE="
+- provider: heroku
+api_key:
+secure: "HgoF2n6pnzUhZ4oBNRo3gSoxj/o9x7QW0fKaj9sEYDNkTgak9IX3ha0antoYKB7dMw7IvB8oqVAHqldlGlG/kxUDKN8kaYA4O7CZgKP0auykTkmH61FP7MKV0hV0vyFfJLSraClVw/DL5tjYybs/rSBWFAvwttIB7QlcFv5sLIuwe5nHzftFfxba0H1+0oynO55FlmRzFBWydlFVNTsGVRddFHPgG3f2V9YYIjT18MKvn+BcV7WUq6QweB/3RjwTDYzws9mcWt6i3ju1D4nDTDVbtzeLs7Yt+NhnvrDeB+2FWSrV03LCjnnzv2zCUaROU522Vns0ZC/+2b9V3YbcCBJRA8QLBdo12JUCUzrdk8DOqIYKwlW6WbI3DcgRGz9Li7Dw2466+lsPidBfcdxnziTvc+r/2r6dKv3q7nwDpoZEyxnnTumPf50PJZs/lDu99Vo5C2NIdN2O5sqDHd8AJTLtWrY3FdN5MGKqTm0R9rVnObvUkTKbZJrDn39O5MU3mnHINUT+6ZCnNgra7rNn7Ai3tA1RKPB7kIZo41Td8j56iSLANXziCh0MikPfQv4x1scl3c1h5bR8ZqxVqH4IODjDJrJv43+hsbE9xGsmrC4OaqImLUpl/8zi6a4zZ8Na1AiFKVcdLZ9xt40/2yg7ysn53SCpQ5vty/j36clNwAE="

rustfmt.toml

-1

@@ -1,3 +1,2 @@
-struct_lit_width = 24
 fn_single_line = true
 format_strings = false

src/config.rs

+28 -25

@@ -12,11 +12,11 @@ lazy_static! {
 Ok(c) => {
 info!("Configuration parsed from environment variables.");
 c
-},
+}
 Err(missing) => {
 error!("Unable to load environment variables {:?}", missing);
 panic!("Unable to load environment variables {:?}", missing);
-},
+}
 }
 };
 }
@@ -34,8 +34,9 @@ pub struct Config {
 
 impl Config {
 pub fn check(&self) -> bool {
-!self.db_url.is_empty() && !self.github_access_token.is_empty() &&
-!self.github_user_agent.is_empty()
+!self.db_url.is_empty()
+&& !self.github_access_token.is_empty()
+&& !self.github_user_agent.is_empty()
 }
 }
 
@@ -50,22 +51,24 @@ const POST_COMMENTS: &'static str = "POST_COMMENTS";
 // this is complex, but we'll shortly need a lot more config items
 // so checking them automagically seems like a nice solution
 pub fn init() -> Result<Config, Vec<&'static str>> {
-
 let mut vars: BTreeMap<&'static str, Result<String, _>> = BTreeMap::new();
-let keys = vec![DB_URL,
-DB_POOL_SIZE,
-GITHUB_TOKEN,
-GITHUB_WEBHOOK_SECRETS,
-GITHUB_UA,
-POST_COMMENTS];
+let keys = vec![
+DB_URL,
+DB_POOL_SIZE,
+GITHUB_TOKEN,
+GITHUB_WEBHOOK_SECRETS,
+GITHUB_UA,
+POST_COMMENTS,
+];
 
 for var in keys {
 vars.insert(var, env::var(var));
 }
 
 let all_found = vars.iter().all(|(_, v)| v.is_ok());
 if all_found {
-let mut vars = vars.into_iter()
+let mut vars = vars
+.into_iter()
 .map(|(k, v)| (k, v.unwrap()))
 .collect::<BTreeMap<_, _>>();
 
@@ -89,19 +92,19 @@ pub fn init() -> Result<Config, Vec<&'static str>> {
 let webhook_secrets = webhook_secrets.split(',').map(String::from).collect();
 
 Ok(Config {
-db_url: db_url,
-db_pool_size: db_pool_size,
-github_access_token: gh_token,
-github_user_agent: gh_ua,
-github_webhook_secrets: webhook_secrets,
-github_interval_mins: gh_interval,
-post_comments: post_comments,
-})
-
+db_url: db_url,
+db_pool_size: db_pool_size,
+github_access_token: gh_token,
+github_user_agent: gh_ua,
+github_webhook_secrets: webhook_secrets,
+github_interval_mins: gh_interval,
+post_comments: post_comments,
+})
 } else {
-Err(vars.iter()
-.filter(|&(_, v)| v.is_err())
-.map(|(&k, _)| k)
-.collect())
+Err(vars
+.iter()
+.filter(|&(_, v)| v.is_err())
+.map(|(&k, _)| k)
+.collect())
 }
 }

src/domain/github.rs

+56 -20

@@ -5,7 +5,7 @@ use chrono::NaiveDateTime;
 use super::schema::*;
 
 #[derive(AsChangeset, Clone, Debug, Queryable)]
-#[table_name="githubsync"]
+#[table_name = "githubsync"]
 pub struct GitHubSync {
 pub id: i32,
 pub successful: bool,
@@ -14,24 +14,36 @@ pub struct GitHubSync {
 }
 
 #[derive(Clone, Debug, Insertable)]
-#[table_name="githubsync"]
+#[table_name = "githubsync"]
 pub struct GitHubSyncPartial {
 pub successful: bool,
 pub ran_at: NaiveDateTime,
 pub message: Option<String>,
 }
 
-#[derive(AsChangeset, Clone, Debug, Deserialize, Eq, Insertable,
-Ord, PartialEq, PartialOrd, Queryable, Serialize)]
-#[table_name="githubuser"]
+#[derive(
+AsChangeset,
+Clone,
+Debug,
+Deserialize,
+Eq,
+Insertable,
+Ord,
+PartialEq,
+PartialOrd,
+Queryable,
+Serialize,
+)]
+#[table_name = "githubuser"]
 pub struct GitHubUser {
 pub id: i32,
 pub login: String,
 }
 
-#[derive(AsChangeset, Clone, Debug, Deserialize, Eq, Insertable,
-Ord, PartialEq, PartialOrd, Queryable)]
-#[table_name="milestone"]
+#[derive(
+AsChangeset, Clone, Debug, Deserialize, Eq, Insertable, Ord, PartialEq, PartialOrd, Queryable,
+)]
+#[table_name = "milestone"]
 #[changeset_options(treat_none_as_null = "true")]
 pub struct Milestone {
 pub id: i32,
@@ -49,9 +61,20 @@ pub struct Milestone {
 pub repository: String,
 }
 
-#[derive(AsChangeset, Clone, Debug, Deserialize, Eq, Insertable,
-Ord, PartialEq, PartialOrd, Queryable, Serialize)]
-#[table_name="issue"]
+#[derive(
+AsChangeset,
+Clone,
+Debug,
+Deserialize,
+Eq,
+Insertable,
+Ord,
+PartialEq,
+PartialOrd,
+Queryable,
+Serialize,
+)]
+#[table_name = "issue"]
 pub struct IssuePartial {
 pub number: i32,
 pub fk_milestone: Option<i32>,
@@ -91,9 +114,10 @@ impl IssuePartial {
 }
 }
 
-#[derive(AsChangeset, Clone, Debug, Deserialize, Eq, Ord,
-PartialEq, PartialOrd, Queryable, Serialize)]
-#[table_name="issue"]
+#[derive(
+AsChangeset, Clone, Debug, Deserialize, Eq, Ord, PartialEq, PartialOrd, Queryable, Serialize,
+)]
+#[table_name = "issue"]
 #[changeset_options(treat_none_as_null = "true")]
 pub struct Issue {
 pub id: i32,
@@ -113,9 +137,20 @@ pub struct Issue {
 pub repository: String,
 }
 
-#[derive(AsChangeset, Clone, Debug, Deserialize, Eq, Insertable,
-Ord, PartialEq, PartialOrd, Queryable, Serialize)]
-#[table_name="issuecomment"]
+#[derive(
+AsChangeset,
+Clone,
+Debug,
+Deserialize,
+Eq,
+Insertable,
+Ord,
+PartialEq,
+PartialOrd,
+Queryable,
+Serialize,
+)]
+#[table_name = "issuecomment"]
 #[changeset_options(treat_none_as_null = "true")]
 pub struct IssueComment {
 pub id: i32,
@@ -127,9 +162,10 @@ pub struct IssueComment {
 pub repository: String,
 }
 
-#[derive(AsChangeset, Clone, Debug, Deserialize, Eq, Insertable,
-Ord, PartialEq, PartialOrd, Queryable)]
-#[table_name="pullrequest"]
+#[derive(
+AsChangeset, Clone, Debug, Deserialize, Eq, Insertable, Ord, PartialEq, PartialOrd, Queryable,
+)]
+#[table_name = "pullrequest"]
 #[changeset_options(treat_none_as_null = "true")]
 pub struct PullRequest {
 pub number: i32,

src/domain/rfcbot.rs

+25 -21

@@ -3,7 +3,7 @@ use chrono::NaiveDateTime;
 use super::schema::*;
 
 #[derive(Clone, Debug, Eq, Ord, Insertable, PartialEq, PartialOrd)]
-#[table_name="poll"]
+#[table_name = "poll"]
 pub struct NewPoll<'a> {
 pub fk_issue: i32,
 pub fk_initiator: i32,
@@ -15,9 +15,10 @@ pub struct NewPoll<'a> {
 pub poll_teams: &'a str,
 }
 
-#[derive(AsChangeset, Clone, Debug, Deserialize, Eq, Ord,
-PartialEq, PartialOrd, Queryable, Serialize)]
-#[table_name="poll"]
+#[derive(
+AsChangeset, Clone, Debug, Deserialize, Eq, Ord, PartialEq, PartialOrd, Queryable, Serialize,
+)]
+#[table_name = "poll"]
 pub struct Poll {
 pub id: i32,
 pub fk_issue: i32,
@@ -31,7 +32,7 @@ pub struct Poll {
 }
 
 #[derive(Clone, Debug, Eq, Ord, Insertable, PartialEq, PartialOrd)]
-#[table_name="fcp_proposal"]
+#[table_name = "fcp_proposal"]
 pub struct NewFcpProposal<'a> {
 pub fk_issue: i32,
 pub fk_initiator: i32,
@@ -43,27 +44,29 @@ pub struct NewFcpProposal<'a> {
 }
 
 #[derive(Clone, Debug, Eq, Insertable, Ord, PartialEq, PartialOrd, Serialize)]
-#[table_name="poll_response_request"]
+#[table_name = "poll_response_request"]
 pub struct NewPollResponseRequest {
 pub fk_poll: i32,
 pub fk_respondent: i32,
 pub responded: bool,
 }
 
-#[derive(AsChangeset, Clone, Debug, Deserialize, Eq, Ord,
-PartialEq, PartialOrd, Queryable, Serialize)]
-#[table_name="poll_response_request"]
+#[derive(
+AsChangeset, Clone, Debug, Deserialize, Eq, Ord, PartialEq, PartialOrd, Queryable, Serialize,
+)]
+#[table_name = "poll_response_request"]
 pub struct PollResponseRequest {
 pub id: i32,
 pub fk_poll: i32,
 pub fk_respondent: i32,
 pub responded: bool,
 }
 
-#[derive(AsChangeset, Clone, Debug, Deserialize, Eq, Ord,
-PartialEq, PartialOrd, Queryable, Serialize)]
-#[table_name="fcp_proposal"]
-#[changeset_options(treat_none_as_null="true")]
+#[derive(
+AsChangeset, Clone, Debug, Deserialize, Eq, Ord, PartialEq, PartialOrd, Queryable, Serialize,
+)]
+#[table_name = "fcp_proposal"]
+#[changeset_options(treat_none_as_null = "true")]
 pub struct FcpProposal {
 pub id: i32,
 pub fk_issue: i32,
@@ -76,16 +79,17 @@ pub struct FcpProposal {
 }
 
 #[derive(Clone, Debug, Eq, Insertable, Ord, PartialEq, PartialOrd, Serialize)]
-#[table_name="fcp_review_request"]
+#[table_name = "fcp_review_request"]
 pub struct NewFcpReviewRequest {
 pub fk_proposal: i32,
 pub fk_reviewer: i32,
 pub reviewed: bool,
 }
 
-#[derive(AsChangeset, Clone, Debug, Deserialize, Eq, Ord,
-PartialEq, PartialOrd, Queryable, Serialize)]
-#[table_name="fcp_review_request"]
+#[derive(
+AsChangeset, Clone, Debug, Deserialize, Eq, Ord, PartialEq, PartialOrd, Queryable, Serialize,
+)]
+#[table_name = "fcp_review_request"]
 pub struct FcpReviewRequest {
 pub id: i32,
 pub fk_proposal: i32,
@@ -94,7 +98,7 @@ pub struct FcpReviewRequest {
 }
 
 #[derive(Clone, Debug, Eq, Insertable, Ord, PartialEq, PartialOrd)]
-#[table_name="fcp_concern"]
+#[table_name = "fcp_concern"]
 pub struct NewFcpConcern<'a> {
 pub fk_proposal: i32,
 pub fk_initiator: i32,
@@ -104,7 +108,7 @@ pub struct NewFcpConcern<'a> {
 }
 
 #[derive(AsChangeset, Clone, Debug, Deserialize, Eq, Ord, PartialEq, PartialOrd, Queryable)]
-#[table_name="fcp_concern"]
+#[table_name = "fcp_concern"]
 pub struct FcpConcern {
 pub id: i32,
 pub fk_proposal: i32,
@@ -115,7 +119,7 @@ pub struct FcpConcern {
 }
 
 #[derive(Clone, Debug, Eq, Insertable, Ord, PartialEq, PartialOrd)]
-#[table_name="rfc_feedback_request"]
+#[table_name = "rfc_feedback_request"]
 pub struct NewFeedbackRequest {
 pub fk_initiator: i32,
 pub fk_requested: i32,
@@ -124,7 +128,7 @@ pub struct NewFeedbackRequest {
 }
 
 #[derive(AsChangeset, Clone, Debug, Deserialize, Eq, Ord, PartialEq, PartialOrd, Queryable)]
-#[table_name="rfc_feedback_request"]
+#[table_name = "rfc_feedback_request"]
 pub struct FeedbackRequest {
 pub id: i32,
 pub fk_initiator: i32,

src/domain/schema.rs

-1

@@ -171,7 +171,6 @@ joinable!(poll -> issue (fk_issue));
 joinable!(poll_response_request -> poll (fk_poll));
 joinable!(poll_response_request -> githubuser (fk_respondent));
 
-
 allow_tables_to_appear_in_same_query!(fcp_concern, githubuser);
 allow_tables_to_appear_in_same_query!(fcp_concern, fcp_proposal);
 allow_tables_to_appear_in_same_query!(fcp_proposal, githubuser);

src/error.rs

+1 -1

@@ -5,8 +5,8 @@ use std::convert::From;
 use std::io;
 
 use diesel;
-use serde_json;
 use rocket_contrib::templates::handlebars;
+use serde_json;
 
 pub type DashResult<T> = std::result::Result<T, DashError>;
 

src/github/client.rs

+50 -41

@@ -6,14 +6,14 @@ use std::time::Duration;
 use std::u32;
 
 use chrono::{DateTime, Utc};
+use reqwest::{self, header::HeaderMap, Response, StatusCode};
 use serde::de::DeserializeOwned;
 use serde_json;
-use reqwest::{self, StatusCode, Response, header::HeaderMap};
 
 use config::CONFIG;
+use domain::github::GitHubUser;
 use error::{DashError, DashResult};
 use github::models::{CommentFromJson, IssueFromJson, PullRequestFromJson, PullRequestUrls};
-use domain::github::GitHubUser;
 
 pub const BASE_URL: &'static str = "https://api.github.com";
 
@@ -36,15 +36,20 @@ impl Client {
 if !CONFIG.github_access_token.trim().is_empty() {
 headers.insert(
 "Authorization",
-format!("token {}", CONFIG.github_access_token).parse().unwrap(),
+format!("token {}", CONFIG.github_access_token)
+.parse()
+.unwrap(),
 );
 }
 headers.insert("User-Agent", CONFIG.github_user_agent.parse().unwrap());
 headers.insert("Time-Zone", "UTC".parse().unwrap());
 headers.insert("Accept", "application/vnd.github.v3".parse().unwrap());
 headers.insert("Connection", "close".parse().unwrap());
 Client {
-client: reqwest::Client::builder().default_headers(headers).build().unwrap(),
+client: reqwest::Client::builder()
+.default_headers(headers)
+.build()
+.unwrap(),
 rate_limit: u32::MAX,
 rate_limit_timeout: Utc::now(),
 }
@@ -65,39 +70,43 @@ impl Client {
 }
 }
 throw!(DashError::Misc(None))
-
 }
 Ok(repos)
 }
 
 pub fn issues_since(&self, repo: &str, start: DateTime<Utc>) -> DashResult<Vec<IssueFromJson>> {
-self.get_models(&format!("{}/repos/{}/issues", BASE_URL, repo),
+self.get_models(
+&format!("{}/repos/{}/issues", BASE_URL, repo),
 Some(&btreemap! {
 "state" => "all".to_string(),
 "since" => format!("{:?}", start),
 "per_page" => format!("{}", PER_PAGE),
-"direction" => "asc".to_string()
-}))
-}
-
-pub fn comments_since(&self,
-repo: &str,
-start: DateTime<Utc>)
--> DashResult<Vec<CommentFromJson>> {
-self.get_models(&format!("{}/repos/{}/issues/comments", BASE_URL, repo),
+"direction" => "asc".to_string()
+}),
+)
+}
+
+pub fn comments_since(
+&self,
+repo: &str,
+start: DateTime<Utc>,
+) -> DashResult<Vec<CommentFromJson>> {
+self.get_models(
+&format!("{}/repos/{}/issues/comments", BASE_URL, repo),
 Some(&btreemap! {
 "sort" => "created".to_string(),
 "direction" => "asc".to_string(),
 "since" => format!("{:?}", start),
 "per_page" => format!("{}", PER_PAGE)
-}))
+}),
+)
 }
 
-fn get_models<M: DeserializeOwned>(&self,
-start_url: &str,
-params: Option<&ParameterMap>)
--> DashResult<Vec<M>> {
-
+fn get_models<M: DeserializeOwned>(
+&self,
+start_url: &str,
+params: Option<&ParameterMap>,
+) -> DashResult<Vec<M>> {
 let mut res = self.get(start_url, params)?;
 let mut models: Vec<M> = res.json()?;
 while let Some(url) = Self::next_page(res.headers()) {
@@ -120,7 +129,6 @@ impl Client {
 if let Some(lh) = h.get("Link") {
 let lh = &lh.to_str().unwrap();
 for link in (**lh).split(',').map(|s| s.trim()) {
-
 let tokens = link.split(';').map(|s| s.trim()).collect::<Vec<_>>();
 
 if tokens.len() != 2 {
@@ -166,11 +174,10 @@ impl Client {
 }
 
 pub fn remove_label(&self, repo: &str, issue_num: i32, label: &str) -> DashResult<()> {
-let url = format!("{}/repos/{}/issues/{}/labels/{}",
-BASE_URL,
-repo,
-issue_num,
-label);
+let url = format!(
+"{}/repos/{}/issues/{}/labels/{}",
+BASE_URL, repo, issue_num, label
+);
 let mut res = self.delete(&url)?;
 
 if StatusCode::NO_CONTENT != res.status() {
@@ -180,25 +187,27 @@ impl Client {
 Ok(())
 }
 
-pub fn new_comment(&self,
-repo: &str,
-issue_num: i32,
-text: &str)
--> DashResult<CommentFromJson> {
+pub fn new_comment(
+&self,
+repo: &str,
+issue_num: i32,
+text: &str,
+) -> DashResult<CommentFromJson> {
 let url = format!("{}/repos/{}/issues/{}/comments", BASE_URL, repo, issue_num);
 let payload = serde_json::to_string(&btreemap!("body" => text))?;
 Ok(self.post(&url, &payload)?.error_for_status()?.json()?)
 }
 
-pub fn edit_comment(&self,
-repo: &str,
-comment_num: i32,
-text: &str)
--> DashResult<CommentFromJson> {
-let url = format!("{}/repos/{}/issues/comments/{}",
-BASE_URL,
-repo,
-comment_num);
+pub fn edit_comment(
+&self,
+repo: &str,
+comment_num: i32,
+text: &str,
+) -> DashResult<CommentFromJson> {
+let url = format!(
+"{}/repos/{}/issues/comments/{}",
+BASE_URL, repo, comment_num
+);
 let payload = serde_json::to_string(&btreemap!("body" => text))?;
 Ok(self.patch(&url, &payload)?.error_for_status()?.json()?)
 }

src/github/command.rs

+319 -149 (large diff not rendered by default)

src/github/mod.rs

+27 -20

@@ -1,21 +1,20 @@
 // Copyright 2016 Adam Perry. Dual-licensed MIT and Apache 2.0 (see LICENSE files for details).
 
-
 pub mod client;
-pub mod models;
 mod command;
+pub mod models;
 mod nag;
 pub mod webhooks;
 
 use chrono::{DateTime, NaiveDate, NaiveDateTime, NaiveTime, Utc};
-use diesel::prelude::*;
-use diesel::pg::PgConnection;
 use diesel;
+use diesel::pg::PgConnection;
+use diesel::prelude::*;
 
-use DB_POOL;
 use domain::github::*;
 use domain::schema::*;
 use error::DashResult;
+use DB_POOL;
 
 use self::client::Client;
 use self::models::{CommentFromJson, IssueFromJson, PullRequestFromJson};
@@ -27,8 +26,10 @@ lazy_static! {
 pub fn most_recent_update() -> DashResult<DateTime<Utc>> {
 info!("finding most recent github updates");
 
-let default_date = NaiveDateTime::new(NaiveDate::from_ymd(2015, 5, 15),
-NaiveTime::from_hms(0, 0, 0));
+let default_date = NaiveDateTime::new(
+NaiveDate::from_ymd(2015, 5, 15),
+NaiveTime::from_hms(0, 0, 0),
+);
 
 let conn = &*DB_POOL.get()?;
 
@@ -55,7 +56,9 @@ pub fn record_successful_update(ingest_start: NaiveDateTime) -> DashResult<()> {
 message: None,
 };
 
-diesel::insert_into(githubsync).values(&sync_record).execute(conn)?;
+diesel::insert_into(githubsync)
+.values(&sync_record)
+.execute(conn)?;
 Ok(())
 }
 
@@ -77,19 +80,22 @@ pub fn ingest_since(repo: &str, start: DateTime<Utc>) -> DashResult<()> {
 }
 }
 
-debug!("num pull requests updated since {}: {:#?}",
-&start,
-prs.len());
+debug!(
+"num pull requests updated since {}: {:#?}",
+&start,
+prs.len()
+);
 
 debug!("num issues updated since {}: {:?}", &start, issues.len());
-debug!("num comments updated since {}: {:?}",
-&start,
-comments.len());
+debug!(
+"num comments updated since {}: {:?}",
+&start,
+comments.len()
+);
 
 let conn = &*DB_POOL.get()?;
 debug!("let's insert some stuff in the database");
 
-
 // make sure we have all of the users to ensure referential integrity
 for issue in issues {
 let issue_number = issue.number;
@@ -141,9 +147,10 @@ pub fn handle_comment(conn: &PgConnection, comment: CommentFromJson, repo: &str)
 // We only want to run `nag::update_nags` on insert to avoid
 // double-processing commits, so we can't use upsert here
 if issuecomment::table
-.find(comment.id)
-.get_result::<IssueComment>(conn)
-.is_ok() {
+.find(comment.id)
+.get_result::<IssueComment>(conn)
+.is_ok()
+{
 diesel::update(issuecomment::table.find(comment.id))
 .set(&comment)
 .execute(conn)?;
@@ -216,8 +223,8 @@ mod tests {
 fn test_handle_user() {
 ::utils::setup_test_env();
 let db_url = env::var("DATABASE_URL").expect("DATABASE_URL must be set");
-let conn = PgConnection::establish(&db_url)
-.expect(&format!("Error connecting to {}", db_url));
+let conn =
+PgConnection::establish(&db_url).expect(&format!("Error connecting to {}", db_url));
 
 let user = GitHubUser {
 id: -1,

src/github/models.rs

+19 -16

@@ -5,9 +5,9 @@ use std::i32;
 
 use chrono::{DateTime, Utc};
 
-use DB_POOL;
-use domain::github::{IssueComment, IssuePartial, Milestone, PullRequest, GitHubUser};
+use domain::github::{GitHubUser, IssueComment, IssuePartial, Milestone, PullRequest};
 use error::DashResult;
+use DB_POOL;
 
 #[derive(Clone, Debug, Deserialize)]
 pub struct MilestoneFromJson {
@@ -82,7 +82,8 @@ impl IssueFromJson {
 open: self.state == "open",
 is_pull_request: self.pull_request.is_some(),
 title: self.title.replace(0x00 as char, ""),
-body: self.body
+body: self
+.body
 .unwrap_or_else(String::new)
 .replace(0x00 as char, ""),
 locked: self.locked,
@@ -115,7 +116,8 @@ impl CommentFromJson {
 use diesel::prelude::*;
 use domain::schema::issue::dsl::*;
 
-let issue_number = self.html_url
+let issue_number = self
+.html_url
 .split('#')
 .next()
 .map(|r| r.split('/').last().map(|n| n.parse::<i32>()));
@@ -131,20 +133,21 @@ impl CommentFromJson {
 
 let conn = DB_POOL.get()?;
 
-let issue_id = issue.select(id)
-.filter(number.eq(issue_number))
-.filter(repository.eq(repo))
-.first::<i32>(&*conn)?;
+let issue_id = issue
+.select(id)
+.filter(number.eq(issue_number))
+.filter(repository.eq(repo))
+.first::<i32>(&*conn)?;
 
 Ok(IssueComment {
-id: self.id,
-fk_issue: issue_id,
-fk_user: self.user.id,
-body: self.body.replace(0x00 as char, ""),
-created_at: self.created_at.naive_utc(),
-updated_at: self.updated_at.naive_utc(),
-repository: repo.to_string(),
-})
+id: self.id,
+fk_issue: issue_id,
+fk_user: self.user.id,
+body: self.body.replace(0x00 as char, ""),
+created_at: self.created_at.naive_utc(),
+updated_at: self.updated_at.naive_utc(),
+repository: repo.to_string(),
+})
 }
 }
 

src/github/nag.rs

+253 -187 (large diff not rendered by default)

src/github/webhooks.rs

+45 -36

@@ -5,10 +5,10 @@ use crypto::mac::Mac;
 use crypto::mac::MacResult;
 use crypto::sha1::Sha1;
 use hex::FromHex;
-use rocket::http::Status;
 use rocket::data::{self, Data, FromDataSimple};
-use rocket::request::Request;
+use rocket::http::Status;
 use rocket::outcome::Outcome::*;
+use rocket::request::Request;
 use serde_json;
 
 use config::CONFIG;
@@ -59,13 +59,17 @@ impl FromDataSimple for Event {
 Ok(p) => p,
 Err(DashError::Serde(why)) => {
 info!("failed to parse webhook payload: {:?}", why);
-return Failure((Status::BadRequest,
-"failed to deserialize request payload"));
+return Failure((
+Status::BadRequest,
+"failed to deserialize request payload",
+));
 }
 Err(why) => {
 error!("non-json-parsing error with webhook payload: {:?}", why);
-return Failure((Status::InternalServerError,
-"unknown failure, check the logs"));
+return Failure((
+Status::InternalServerError,
+"unknown failure, check the logs",
+));
 }
 };
 
@@ -75,17 +79,21 @@ impl FromDataSimple for Event {
 payload: payload,
 };
 
-info!("Received valid webhook ({} id {})",
-full_event.event_name,
-full_event.delivery_id);
+info!(
+"Received valid webhook ({} id {})",
+full_event.event_name, full_event.delivery_id
+);
 
 return Success(full_event);
 }
 }
 
 warn!("Received invalid webhook: {:?}", request);
 warn!("Invalid webhook body: `{}`", body);
-warn!("Tried {} webhook secrets", CONFIG.github_webhook_secrets.len());
+warn!(
+"Tried {} webhook secrets",
+CONFIG.github_webhook_secrets.len()
+);
 Failure((Status::Forbidden, "unable to authenticate webhook"))
 }
 }
@@ -109,42 +117,43 @@ fn parse_event(event_name: &str, body: &str) -> DashResult<Payload> {
 "issues" => Ok(Payload::Issues(serde_json::from_str(body)?)),
 "pull_request" => Ok(Payload::PullRequest(serde_json::from_str(body)?)),
 
-"commit_comment" |
-"create" |
-"delete" |
-"deployment" |
-"deployment_status" |
-"fork" |
-"gollum" |
-"label" |
-"member" |
-"membership" |
-"milestone" |
-"organization" |
-"page_build" |
-"public" |
-"pull_request_review_comment" |
-"pull_request_review" |
-"push" |
-"repository" |
-"release" |
-"status" |
-"team" |
-"team_add" |
-"watch" => {
+"commit_comment"
+| "create"
+| "delete"
+| "deployment"
+| "deployment_status"
+| "fork"
+| "gollum"
+| "label"
+| "member"
+| "membership"
+| "milestone"
+| "organization"
+| "page_build"
+| "public"
+| "pull_request_review_comment"
+| "pull_request_review"
+| "push"
+| "repository"
+| "release"
+| "status"
+| "team"
+| "team_add"
+| "watch" => {
 info!("Received {} event, ignoring...", event_name);
 Ok(Payload::Unsupported)
 }
 
 _ => {
-warn!("Received unrecognized event {}, check GitHub's API to see what's updated.",
-event_name);
+warn!(
+"Received unrecognized event {}, check GitHub's API to see what's updated.",
+event_name
+);
 Ok(Payload::Unsupported)
 }
 }
 }
 
-
 #[derive(Debug)]
 pub enum Payload {
 Issues(IssuesEvent),

src/main.rs

+11 -12

@@ -43,8 +43,8 @@ mod utils;
 
 use chrono::Local;
 use diesel::pg::PgConnection;
-use diesel::r2d2::Pool;
 use diesel::r2d2::ConnectionManager;
+use diesel::r2d2::Pool;
 use env_logger::LogBuilder;
 use log::LogRecord;
 
@@ -57,12 +57,14 @@ fn main() {
 LogBuilder::new()
 .format(|rec: &LogRecord| {
 let loc = rec.location();
-format!("[{} {}:{} {}] {}",
-rec.level(),
-loc.module_path(),
-loc.line(),
-Local::now().format("%Y-%m-%d %H:%M:%S"),
-rec.args())
+format!(
+"[{} {}:{} {}] {}",
+rec.level(),
+loc.module_path(),
+loc.line(),
+Local::now().format("%Y-%m-%d %H:%M:%S"),
+rec.args()
+)
 })
 .parse(&std::env::var("RUST_LOG").unwrap_or_else(|_| "info".to_string()))
 .init()
@@ -96,14 +98,11 @@ lazy_static! {
 
 let manager = ConnectionManager::<PgConnection>::new(CONFIG.db_url.clone());
 
-match Pool::builder()
-.max_size(CONFIG.db_pool_size)
-.build(manager)
-{
+match Pool::builder().max_size(CONFIG.db_pool_size).build(manager) {
 Ok(p) => {
 info!("DB connection pool established.");
 p
-},
+}
 Err(why) => {
 error!("Failed to establish DB connection pool: {}", why);
 panic!("Error creating connection pool.");

src/nag.rs

+2 -2

@@ -1,9 +1,9 @@
 use diesel::prelude::*;
 
-use DB_POOL;
 use domain::github::{GitHubUser, Issue, IssueComment};
 use domain::rfcbot::{FcpProposal, FcpReviewRequest};
 use error::DashResult;
+use DB_POOL;
 
 #[derive(Serialize)]
 pub struct FcpWithInfo {
@@ -76,7 +76,7 @@ pub fn individual_nags(username: &str) -> DashResult<(GitHubUser, Vec<Individual
 
 let review_requests = fcp_review_request::table
 .inner_join(fcp_proposal::table)
-.filter(fcp_proposal::fcp_start.is_null())
+.filter(fcp_proposal::fcp_start.is_null())
 .filter(fcp_review_request::fk_reviewer.eq(user.id))
 .filter(fcp_review_request::reviewed.eq(false))
 .load::<(FcpReviewRequest, FcpProposal)>(conn)?;

src/scraper.rs

+8 -4

@@ -6,10 +6,14 @@ use config::{CONFIG, GH_ORGS};
 use github;
 
 pub fn start_scraping() -> Option<JoinHandle<()>> {
-Some(::utils::spawn_thread("GitHub scraper", CONFIG.github_interval_mins?, || {
-scrape_github(github::most_recent_update()?);
-Ok(())
-}))
+Some(::utils::spawn_thread(
+"GitHub scraper",
+CONFIG.github_interval_mins?,
+|| {
+scrape_github(github::most_recent_update()?);
+Ok(())
+},
+))
 }
 
 pub fn scrape_github(since: DateTime<Utc>) {

src/server.rs

+14 -12

@@ -1,6 +1,6 @@
-use std::panic::catch_unwind;
 use rocket;
 use rocket_contrib::templates::handlebars::Handlebars;
+use std::panic::catch_unwind;
 
 pub fn serve() {
 // in debug builds this will force an init, good enough for testing
@@ -24,11 +24,11 @@ pub fn serve() {
 }
 
 mod html {
-use std::collections::BTreeMap;
-use rocket::response::content;
+use super::TEMPLATES;
 use error::DashResult;
 use nag;
-use super::TEMPLATES;
+use rocket::response::content;
+use std::collections::BTreeMap;
 
 type Html = content::Html<String>;
 
@@ -70,9 +70,9 @@ mod html {
 .into_iter()
 .map(|(team_label, fcps)| {
 json!({
-"team": team_label,
-"fcps": fcps,
-})
+"team": team_label,
+"fcps": fcps,
+})
 })
 .collect::<Vec<_>>();
 
@@ -97,13 +97,13 @@ mod html {
 }
 
 mod api {
-use rocket_contrib::json::Json;
-use DB_POOL;
 use domain::github::GitHubUser;
 use error::DashResult;
-use github::{handle_comment, handle_issue, handle_pr};
 use github::webhooks::{Event, Payload};
+use github::{handle_comment, handle_issue, handle_pr};
 use nag;
+use rocket_contrib::json::Json;
+use DB_POOL;
 
 #[get("/all")]
 pub fn all_fcps() -> DashResult<Json<Vec<nag::FcpWithInfo>>> { Ok(Json(nag::all_fcps()?)) }
@@ -165,9 +165,11 @@ lazy_static! {
 let user_fcps_fragment = include_str!("templates/fcp-user.hbs");
 let user_fcps_template = root_template.replace("{{content}}", user_fcps_fragment);
 
-hbars.register_template_string("all", &all_fcps_template)
+hbars
+.register_template_string("all", &all_fcps_template)
 .expect("unable to register all-fcps template");
-hbars.register_template_string("user", &user_fcps_template)
+hbars
+.register_template_string("user", &user_fcps_template)
 .expect("unable to register user fcps template");
 
 hbars

src/teams.rs

+53 -35

@@ -18,7 +18,8 @@ const UPDATE_CONFIG_EVERY_MIN: u64 = 5;
 type TeamsMap = BTreeMap<TeamLabel, Team>;
 
 lazy_static! {
-pub static ref SETUP: Arc<RwLock<RfcbotConfig>> = Arc::new(RwLock::new(read_rfcbot_cfg_validated()));
+pub static ref SETUP: Arc<RwLock<RfcbotConfig>> =
+Arc::new(RwLock::new(read_rfcbot_cfg_validated()));
 }
 
 #[derive(Debug, Deserialize)]
@@ -33,9 +34,7 @@ pub struct RfcbotConfig {
 
 impl RfcbotConfig {
 /// Retrive an iterator over all the team labels.
-pub fn team_labels(&self) -> impl Iterator<Item = &TeamLabel> {
-self.teams().map(|(k, _)| k)
-}
+pub fn team_labels(&self) -> impl Iterator<Item = &TeamLabel> { self.teams().map(|(k, _)| k) }
 
 /// Retrive an iterator over all the (team label, team) pairs.
 pub fn teams(&self) -> impl Iterator<Item = (&TeamLabel, &Team)> {
@@ -47,12 +46,18 @@ impl RfcbotConfig {
 
 /// Are we allowed to auto-close issues after F-FCP in this repo?
 pub fn should_ffcp_auto_close(&self, repo: &str) -> bool {
-self.fcp_behaviors.get(repo).map(|fcp| fcp.close).unwrap_or_default()
+self.fcp_behaviors
+.get(repo)
+.map(|fcp| fcp.close)
+.unwrap_or_default()
 }
 
 /// Are we allowed to auto-postpone issues after F-FCP in this repo?
 pub fn should_ffcp_auto_postpone(&self, repo: &str) -> bool {
-self.fcp_behaviors.get(repo).map(|fcp| fcp.postpone).unwrap_or_default()
+self.fcp_behaviors
+.get(repo)
+.map(|fcp| fcp.postpone)
+.unwrap_or_default()
 }
 
 // Update the list of teams from external sources, if needed
@@ -62,9 +67,7 @@ impl RfcbotConfig {
 teams: TeamsMap,
 }
 if let RfcbotTeams::Remote { ref url } = &self.teams {
-let de: ToDeserialize = ::reqwest::get(url)?
-.error_for_status()?
-.json()?;
+let de: ToDeserialize = ::reqwest::get(url)?.error_for_status()?.json()?;
 self.cached_teams = de.teams;
 }
 Ok(())
@@ -87,9 +90,7 @@ pub struct FcpBehavior {
 #[serde(untagged)]
 enum RfcbotTeams {
 Local(TeamsMap),
-Remote {
-url: String,
-}
+Remote { url: String },
 }
 
 #[derive(Debug, Deserialize)]
@@ -100,9 +101,7 @@ pub struct Team {
 }
 
 impl Team {
-pub fn ping(&self) -> &str {
-&self.ping
-}
+pub fn ping(&self) -> &str { &self.ping }
 
 pub fn member_logins(&self) -> impl Iterator<Item = &str> {
 self.members.iter().map(|s| s.as_str())
@@ -132,19 +131,22 @@ pub fn start_updater_thread() {
 fn read_rfcbot_cfg_validated() -> RfcbotConfig {
 let cfg = read_rfcbot_cfg();
 
-cfg.teams().map(|(_, v)| v).for_each(|team|
-team.validate()
-.expect("unable to verify team member from database.
-if you're running this for tests, make sure you've pulled github users from prod")
-);
+cfg.teams().map(|(_, v)| v).for_each(|team| {
+team.validate().expect(
+"unable to verify team member from database.
+if you're running this for tests, make sure you've pulled github users from prod",
+)
+});
 
 cfg
 }
 
 /// Read the unprocessed `rfcbot.toml` configuration file.
 fn read_rfcbot_cfg() -> RfcbotConfig {
-let mut config = read_rfcbot_cfg_from(
-include_str!(concat!(env!("CARGO_MANIFEST_DIR"), "/rfcbot.toml")));
+let mut config = read_rfcbot_cfg_from(include_str!(concat!(
+env!("CARGO_MANIFEST_DIR"),
+"/rfcbot.toml"
+)));
 config.update().expect("couldn't update the configuration!");
 config
 }
@@ -161,7 +163,11 @@ impl Team {
 
 // bail if they don't exist, but we don't want to actually keep the id in ram
 for member_login in self.member_logins() {
-if githubuser.filter(login.eq(member_login)).first::<GitHubUser>(conn).is_err() {
+if githubuser
+.filter(login.eq(member_login))
+.first::<GitHubUser>(conn)
+.is_err()
+{
 ::github::handle_user(&conn, &gh.get_user(member_login)?)?;
 info!("loaded into the database user {}", member_login);
 }
@@ -180,8 +186,8 @@ pub mod test {
 use super::*;
 
 lazy_static! {
-pub static ref TEST_SETUP: RfcbotConfig =
-read_rfcbot_cfg_from(r#"
+pub static ref TEST_SETUP: RfcbotConfig = read_rfcbot_cfg_from(
+r#"
 [fcp_behaviors]
 
 [fcp_behaviors."rust-lang/alpha"]
@@ -220,33 +226,45 @@ members = [
 "batman",
 "theflash"
 ]
-"#);
+"#
+);
 }
 
 #[test]
 fn setup_parser_correct() {
 let cfg = &*TEST_SETUP;
 
 // Labels are correct:
-assert_eq!(cfg.team_labels().map(|tl| tl.0.clone()).collect::<Vec<_>>(),
-vec!["T-avengers", "justice-league"]);
+assert_eq!(
+cfg.team_labels().map(|tl| tl.0.clone()).collect::<Vec<_>>(),
+vec!["T-avengers", "justice-league"]
+);
 
 // Teams are correct:
-let map: BTreeMap<_, _> =
-cfg.teams().map(|(k, v)| (k.0.clone(), v.clone())).collect();
+let map: BTreeMap<_, _> = cfg.teams().map(|(k, v)| (k.0.clone(), v.clone())).collect();
 
 let avengers = map.get("T-avengers").unwrap();
 //assert_eq!(avengers.name, "The Avengers");
 //assert_eq!(avengers.ping, "marvel/avengers");
-assert_eq!(avengers.member_logins().collect::<Vec<_>>(),
-vec!["hulk", "thor", "thevision", "blackwidow",
-"spiderman", "captainamerica"]);
+assert_eq!(
+avengers.member_logins().collect::<Vec<_>>(),
+vec![
+"hulk",
+"thor",
+"thevision",
+"blackwidow",
+"spiderman",
+"captainamerica"
+]
+);
 
 let jsa = map.get("justice-league").unwrap();
 //assert_eq!(jsa.name, "Justice League of America");
 //assert_eq!(jsa.ping, "dc-comics/justice-league");
-assert_eq!(jsa.member_logins().collect::<Vec<_>>(),
-vec!["superman", "wonderwoman", "aquaman", "batman", "theflash"]);
+assert_eq!(
+jsa.member_logins().collect::<Vec<_>>(),
+vec!["superman", "wonderwoman", "aquaman", "batman", "theflash"]
+);
 
 // Random non-existent team does not exist:
 assert!(map.get("random").is_none());

src/utils.rs

+12 -10

@@ -1,27 +1,29 @@
-use std::time::Duration;
-use std::thread::{self, JoinHandle};
 use error::DashResult;
+use std::thread::{self, JoinHandle};
+use std::time::Duration;
 
 pub(crate) fn spawn_thread<F>(name: &'static str, interval_minutes: u64, f: F) -> JoinHandle<()>
 where
 F: Fn() -> DashResult<()> + Send + 'static,
 {
 let duration = Duration::from_secs(interval_minutes * 60);
-thread::spawn(move || {
-loop {
-if let Err(err) = f() {
-error!("the {} thread failed an iteration: {:?}", name, err);
-}
-info!("{} thread sleeping for {} seconds", name, duration.as_secs());
-thread::sleep(duration);
+thread::spawn(move || loop {
+if let Err(err) = f() {
+error!("the {} thread failed an iteration: {:?}", name, err);
 }
+info!(
+"{} thread sleeping for {} seconds",
+name,
+duration.as_secs()
+);
+thread::sleep(duration);
 })
 }
 
 #[cfg(test)]
 pub(crate) fn setup_test_env() {
-use std::sync::Once;
 use std::path::Path;
+use std::sync::Once;
 
 static ONCE: Once = Once::new();
 ONCE.call_once(|| {
