Rollup of 8 pull requests #67356

Merged
merged 22 commits on Dec 16, 2019
Changes from all commits
22 commits
cc863f0
Improve `str` prefix/suffix comparison
ranma42 Dec 11, 2019
1f6d023
Prefer encoding the char when checking for string prefix/suffix
ranma42 Dec 11, 2019
de7fefa
Minor cleanup in `Pattern::{is_prefix_of,is_suffix_of}` for `char`
ranma42 Dec 12, 2019
2514cd5
Delete flaky test net::tcp::tests::fast_rebind
dtolnay Dec 15, 2019
f2d6413
comment -> doc comment
Centril Dec 15, 2019
d848ce0
document check_pat_slice
Centril Dec 15, 2019
66bb978
improve hir::PatKind::Slice docs
Centril Dec 15, 2019
7bf55f4
use Self alias in place of macros
tesuji Dec 15, 2019
5d73af2
make transparent enums more ordinary
Centril Dec 15, 2019
5f68732
Fix JS error when loading page with search
GuillaumeGomez Dec 15, 2019
d9d1f23
improve lower_pat_slice docs + while -> for
Centril Dec 15, 2019
00e7ff4
.gitignore: Don't ignore a file that exists in the repository
joshtriplett Dec 16, 2019
d16b088
Minor: update Unsize docs for dyn syntax
petertodd Dec 16, 2019
3de1923
Add benchmarks for `starts_with` and `ends_with`
ranma42 Dec 16, 2019
a6f817f
Rollup merge of #67249 - ranma42:improve-starts-with-literal-char, r=…
Centril Dec 16, 2019
a1580e7
Rollup merge of #67308 - dtolnay:fast_rebind, r=alexcrichton
Centril Dec 16, 2019
f05646e
Rollup merge of #67318 - Centril:spdocs, r=matthewjasper
Centril Dec 16, 2019
5609683
Rollup merge of #67322 - lzutao:nonzero-use-self, r=joshtriplett
Centril Dec 16, 2019
a1e8a0a
Rollup merge of #67323 - Centril:tes, r=davidtwco
Centril Dec 16, 2019
55753a6
Rollup merge of #67336 - GuillaumeGomez:fix-js-error, r=Dylan-DPC
Centril Dec 16, 2019
a17535c
Rollup merge of #67344 - joshtriplett:no-gitignore-of-committed-files…
Centril Dec 16, 2019
733559b
Rollup merge of #67349 - petertodd:2019-unsize-docs, r=Centril
Centril Dec 16, 2019
1 change: 1 addition & 0 deletions .gitignore
@@ -55,5 +55,6 @@ config.mk
config.stamp
Session.vim
.cargo
!/src/test/run-make/thumb-none-qemu/example/.cargo
no_llvm_build
# Before adding new lines, see the comment at the top.
1 change: 1 addition & 0 deletions src/libcore/benches/lib.rs
@@ -11,4 +11,5 @@ mod hash;
mod iter;
mod num;
mod ops;
mod pattern;
mod slice;
43 changes: 43 additions & 0 deletions src/libcore/benches/pattern.rs
@@ -0,0 +1,43 @@
use test::black_box;
use test::Bencher;

#[bench]
fn starts_with_char(b: &mut Bencher) {
let text = black_box("kdjsfhlakfhlsghlkvcnljknfqiunvcijqenwodind");
b.iter(|| {
for _ in 0..1024 {
black_box(text.starts_with('k'));
}
})
}

#[bench]
fn starts_with_str(b: &mut Bencher) {
let text = black_box("kdjsfhlakfhlsghlkvcnljknfqiunvcijqenwodind");
b.iter(|| {
for _ in 0..1024 {
black_box(text.starts_with("k"));
}
})
}


#[bench]
fn ends_with_char(b: &mut Bencher) {
let text = black_box("kdjsfhlakfhlsghlkvcnljknfqiunvcijqenwodind");
b.iter(|| {
for _ in 0..1024 {
black_box(text.ends_with('k'));
}
})
}

#[bench]
fn ends_with_str(b: &mut Bencher) {
let text = black_box("kdjsfhlakfhlsghlkvcnljknfqiunvcijqenwodind");
b.iter(|| {
for _ in 0..1024 {
black_box(text.ends_with("k"));
}
})
}
2 changes: 1 addition & 1 deletion src/libcore/marker.rs
@@ -97,7 +97,7 @@ pub trait Sized {
/// Types that can be "unsized" to a dynamically-sized type.
///
/// For example, the sized array type `[i8; 2]` implements `Unsize<[i8]>` and
- /// `Unsize<fmt::Debug>`.
+ /// `Unsize<dyn fmt::Debug>`.
///
/// All implementations of `Unsize` are provided automatically by the compiler.
///
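The doc fix above just switches the example to modern `dyn` syntax. For context, a minimal sketch (not part of this diff) of the coercions that `Unsize` enables; the trait itself is unstable, but the coercions shown here work on stable Rust:

```rust
use std::fmt::Debug;

fn main() {
    let arr: [i8; 2] = [1, 2];

    // `[i8; 2]: Unsize<[i8]>` is what underlies this unsized coercion to a slice.
    let slice: &[i8] = &arr;

    // `[i8; 2]: Unsize<dyn Debug>` is what underlies coercion to a trait object.
    let object: &dyn Debug = &arr;

    println!("{:?} / {:?}", slice, object);
}
```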
4 changes: 2 additions & 2 deletions src/libcore/num/mod.rs
@@ -67,7 +67,7 @@ assert_eq!(size_of::<Option<core::num::", stringify!($Ty), ">>(), size_of::<", s
)]
#[inline]
pub const unsafe fn new_unchecked(n: $Int) -> Self {
- $Ty(n)
+ Self(n)
}

/// Creates a non-zero if the given value is not zero.
@@ -76,7 +76,7 @@ assert_eq!(size_of::<Option<core::num::", stringify!($Ty), ">>(), size_of::<", s
pub fn new(n: $Int) -> Option<Self> {
if n != 0 {
// SAFETY: we just checked that there's no `0`
- Some(unsafe { $Ty(n) })
+ Some(unsafe { Self(n) })
} else {
None
}
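The `Self(n)` change above avoids re-expanding the concrete type name inside the `nonzero_integers!` macro. A minimal standalone sketch of the same pattern, using a hypothetical `NonZeroDemo` wrapper rather than the real `NonZeroU32`:

```rust
/// Hypothetical wrapper illustrating the `Self` constructor style.
#[derive(Copy, Clone, Debug)]
pub struct NonZeroDemo(u32);

impl NonZeroDemo {
    /// # Safety
    /// `n` must not be zero.
    pub const unsafe fn new_unchecked(n: u32) -> Self {
        Self(n) // `Self` keeps working even if the type is later renamed
    }

    pub fn new(n: u32) -> Option<Self> {
        if n != 0 {
            // SAFETY: we just checked that `n` is not zero.
            Some(unsafe { Self::new_unchecked(n) })
        } else {
            None
        }
    }
}

fn main() {
    assert!(NonZeroDemo::new(0).is_none());
    assert_eq!(NonZeroDemo::new(42).map(|v| v.0), Some(42));
}
```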
19 changes: 4 additions & 15 deletions src/libcore/str/pattern.rs
@@ -445,21 +445,13 @@ impl<'a> Pattern<'a> for char {

#[inline]
fn is_prefix_of(self, haystack: &'a str) -> bool {
- if let Some(ch) = haystack.chars().next() {
- self == ch
- } else {
- false
- }
+ self.encode_utf8(&mut [0u8; 4]).is_prefix_of(haystack)
}

#[inline]
fn is_suffix_of(self, haystack: &'a str) -> bool where Self::Searcher: ReverseSearcher<'a>
{
- if let Some(ch) = haystack.chars().next_back() {
- self == ch
- } else {
- false
- }
+ self.encode_utf8(&mut [0u8; 4]).is_suffix_of(haystack)
}
}

@@ -710,16 +702,13 @@ impl<'a, 'b> Pattern<'a> for &'b str {
/// Checks whether the pattern matches at the front of the haystack
#[inline]
fn is_prefix_of(self, haystack: &'a str) -> bool {
- haystack.is_char_boundary(self.len()) &&
- self == &haystack[..self.len()]
+ haystack.as_bytes().starts_with(self.as_bytes())
}

/// Checks whether the pattern matches at the back of the haystack
#[inline]
fn is_suffix_of(self, haystack: &'a str) -> bool {
- self.len() <= haystack.len() &&
- haystack.is_char_boundary(haystack.len() - self.len()) &&
- self == &haystack[haystack.len() - self.len()..]
+ haystack.as_bytes().ends_with(self.as_bytes())
}
}

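The two `Pattern` changes above boil down to: a `char` pattern is encoded into a 4-byte stack buffer and then handled like a `&str`, and `&str` prefix/suffix checks compare raw bytes, which is sound because UTF-8 string equality is byte equality. A minimal standalone sketch of the same idea (not the library code itself):

```rust
// Standalone sketch of the new fast path, assuming byte-wise comparison of
// UTF-8 is equivalent to comparing &str prefixes/suffixes.
fn starts_with_char(haystack: &str, needle: char) -> bool {
    let mut buf = [0u8; 4]; // any char fits in at most 4 UTF-8 bytes
    let needle: &str = needle.encode_utf8(&mut buf);
    haystack.as_bytes().starts_with(needle.as_bytes())
}

fn ends_with_char(haystack: &str, needle: char) -> bool {
    let mut buf = [0u8; 4];
    let needle: &str = needle.encode_utf8(&mut buf);
    haystack.as_bytes().ends_with(needle.as_bytes())
}

fn main() {
    assert!(starts_with_char("kdjsf", 'k'));
    assert!(!starts_with_char("kdjsf", 'x'));
    assert!(ends_with_char("héllo…", '…')); // multi-byte chars work too
    assert!(!ends_with_char("", 'a'));      // empty haystack
}
```

The benchmarks added earlier in this rollup compare exactly these paths, `starts_with('k')` versus `starts_with("k")`, on the same haystack.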
2 changes: 1 addition & 1 deletion src/libcore/sync/atomic.rs
@@ -1263,7 +1263,7 @@ let atomic_forty_two = ", stringify!($atomic_type), "::new(42);
#[$stable]
#[cfg_attr(not(bootstrap), $const_stable)]
pub const fn new(v: $int_type) -> Self {
- $atomic_type {v: UnsafeCell::new(v)}
+ Self {v: UnsafeCell::new(v)}
}
}

38 changes: 26 additions & 12 deletions src/librustc/hir/lowering.rs
@@ -2852,19 +2852,23 @@ impl<'a> LoweringContext<'a> {
let mut rest = None;

let mut iter = pats.iter().enumerate();
- while let Some((idx, pat)) = iter.next() {
- // Interpret the first `..` pattern as a subtuple pattern.
+ for (idx, pat) in iter.by_ref() {
+ // Interpret the first `..` pattern as a sub-tuple pattern.
+ // Note that unlike for slice patterns,
+ // where `xs @ ..` is a legal sub-slice pattern,
+ // it is not a legal sub-tuple pattern.
if pat.is_rest() {
rest = Some((idx, pat.span));
break;
}
- // It was not a subslice pattern so lower it normally.
+ // It was not a sub-tuple pattern so lower it normally.
elems.push(self.lower_pat(pat));
}

- while let Some((_, pat)) = iter.next() {
- // There was a previous subtuple pattern; make sure we don't allow more.
+ for (_, pat) in iter {
+ // There was a previous sub-tuple pattern; make sure we don't allow more...
if pat.is_rest() {
+ // ...but there was one again, so error.
self.ban_extra_rest_pat(pat.span, rest.unwrap().1, ctx);
} else {
elems.push(self.lower_pat(pat));
@@ -2874,36 +2878,44 @@ impl<'a> LoweringContext<'a> {
(elems.into(), rest.map(|(ddpos, _)| ddpos))
}

/// Lower a slice pattern of form `[pat_0, ..., pat_n]` into
/// `hir::PatKind::Slice(before, slice, after)`.
///
/// When encountering `($binding_mode $ident @)? ..` (`slice`),
/// this is interpreted as a sub-slice pattern semantically.
/// Patterns that follow, which are not like `slice` -- or an error occurs, are in `after`.
fn lower_pat_slice(&mut self, pats: &[AstP<Pat>]) -> hir::PatKind {
let mut before = Vec::new();
let mut after = Vec::new();
let mut slice = None;
let mut prev_rest_span = None;

let mut iter = pats.iter();
- while let Some(pat) = iter.next() {
- // Interpret the first `((ref mut?)? x @)? ..` pattern as a subslice pattern.
+ // Lower all the patterns until the first occurrence of a sub-slice pattern.
+ for pat in iter.by_ref() {
match pat.kind {
// Found a sub-slice pattern `..`. Record, lower it to `_`, and stop here.
PatKind::Rest => {
prev_rest_span = Some(pat.span);
slice = Some(self.pat_wild_with_node_id_of(pat));
break;
},
// Found a sub-slice pattern `$binding_mode $ident @ ..`.
// Record, lower it to `$binding_mode $ident @ _`, and stop here.
PatKind::Ident(ref bm, ident, Some(ref sub)) if sub.is_rest() => {
prev_rest_span = Some(sub.span);
let lower_sub = |this: &mut Self| Some(this.pat_wild_with_node_id_of(sub));
let node = self.lower_pat_ident(pat, bm, ident, lower_sub);
slice = Some(self.pat_with_node_id_of(pat, node));
break;
},
- _ => {}
+ // It was not a subslice pattern so lower it normally.
+ _ => before.push(self.lower_pat(pat)),
}
-
- // It was not a subslice pattern so lower it normally.
- before.push(self.lower_pat(pat));
}

- while let Some(pat) = iter.next() {
+ // Lower all the patterns after the first sub-slice pattern.
+ for pat in iter {
// There was a previous subslice pattern; make sure we don't allow more.
let rest_span = match pat.kind {
PatKind::Rest => Some(pat.span),
@@ -2915,8 +2927,10 @@ impl<'a> LoweringContext<'a> {
_ => None,
};
if let Some(rest_span) = rest_span {
// We have e.g., `[a, .., b, ..]`. That's no good, error!
self.ban_extra_rest_pat(rest_span, prev_rest_span.unwrap(), "slice");
} else {
// Lower the pattern normally.
after.push(self.lower_pat(pat));
}
}
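For reference, the surface syntax these lowering routines handle, as a small sketch (subslice patterns such as `mid @ ..` were still feature-gated when this rollup landed, so the sketch assumes a compiler where they are accepted):

```rust
fn main() {
    // Tuple pattern: the first `..` is a sub-tuple pattern; a second one is an error.
    let (first, .., last) = (1, 2, 3, 4, 5);
    assert_eq!((first, last), (1, 5));

    // Slice pattern: the rest may optionally be bound, as in `mid @ ..`,
    // which is the sub-slice case handled by `lower_pat_slice`.
    let [head, mid @ .., tail] = [10, 20, 30, 40];
    assert_eq!(head, 10);
    assert_eq!(mid, [20, 30]);
    assert_eq!(tail, 40);

    // Unlike slices, `xs @ ..` is *not* a legal sub-tuple pattern, and a second
    // `..` in either kind of pattern is rejected via `ban_extra_rest_pat`.
}
```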
11 changes: 9 additions & 2 deletions src/librustc/hir/mod.rs
@@ -1048,8 +1048,15 @@ pub enum PatKind {
/// A range pattern (e.g., `1..=2` or `1..2`).
Range(P<Expr>, P<Expr>, RangeEnd),

- /// `[a, b, ..i, y, z]` is represented as:
- /// `PatKind::Slice(box [a, b], Some(i), box [y, z])`.
+ /// A slice pattern, `[before_0, ..., before_n, (slice, after_0, ..., after_n)?]`.
+ ///
+ /// Here, `slice` is lowered from the syntax `($binding_mode $ident @)? ..`.
+ /// If `slice` exists, then `after` can be non-empty.
+ ///
+ /// The representation for e.g., `[a, b, .., c, d]` is:
+ /// ```
+ /// PatKind::Slice([Binding(a), Binding(b)], Some(Wild), [Binding(c), Binding(d)])
+ /// ```
Slice(HirVec<P<Pat>>, Option<P<Pat>>, HirVec<P<Pat>>),
}

41 changes: 20 additions & 21 deletions src/librustc_typeck/check/mod.rs
@@ -2330,7 +2330,7 @@ fn bad_variant_count<'tcx>(tcx: TyCtxt<'tcx>, adt: &'tcx ty::AdtDef, sp: Span, d
);
let mut err = struct_span_err!(tcx.sess, sp, E0731, "transparent enum {}", msg);
err.span_label(sp, &msg);
- if let &[ref start @ .., ref end] = &variant_spans[..] {
+ if let [start @ .., end] = &*variant_spans {
for variant_span in start {
err.span_label(*variant_span, "");
}
@@ -2372,23 +2372,14 @@ fn check_transparent(tcx: TyCtxt<'_>, sp: Span, def_id: DefId) {
}
let sp = tcx.sess.source_map().def_span(sp);

- if adt.is_enum() {
- if !tcx.features().transparent_enums {
- feature_err(
- &tcx.sess.parse_sess,
- sym::transparent_enums,
- sp,
- "transparent enums are unstable",
- )
- .emit();
- }
- if adt.variants.len() != 1 {
- bad_variant_count(tcx, adt, sp, def_id);
- if adt.variants.is_empty() {
- // Don't bother checking the fields. No variants (and thus no fields) exist.
- return;
- }
- }
- }
+ if adt.is_enum() && !tcx.features().transparent_enums {
+ feature_err(
+ &tcx.sess.parse_sess,
+ sym::transparent_enums,
+ sp,
+ "transparent enums are unstable",
+ )
+ .emit();
+ }

if adt.is_union() && !tcx.features().transparent_unions {
@@ -2401,6 +2392,14 @@ fn check_transparent(tcx: TyCtxt<'_>, sp: Span, def_id: DefId) {
.emit();
}

if adt.variants.len() != 1 {
bad_variant_count(tcx, adt, sp, def_id);
if adt.variants.is_empty() {
// Don't bother checking the fields. No variants (and thus no fields) exist.
return;
}
}

// For each field, figure out if it's known to be a ZST and align(1)
let field_infos = adt.all_fields().map(|field| {
let ty = field.ty(tcx, InternalSubsts::identity_for_item(tcx, field.did));
@@ -5351,9 +5350,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
directly, not through a function pointer");
}

- // Resolves `typ` by a single level if `typ` is a type variable.
- // If no resolution is possible, then an error is reported.
- // Numeric inference variables may be left unresolved.
+ /// Resolves `typ` by a single level if `typ` is a type variable.
+ /// If no resolution is possible, then an error is reported.
+ /// Numeric inference variables may be left unresolved.
pub fn structurally_resolved_type(&self, sp: Span, ty: Ty<'tcx>) -> Ty<'tcx> {
let ty = self.resolve_vars_with_obligations(ty);
if !ty.is_ty_var() {
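After the reordering above, the feature-gate check, the union check, and the variant-count check each run independently, and an empty enum bails out before any field inspection. In user-facing terms the rule is unchanged; a hedged sketch of what `#[repr(transparent)]` accepts (transparent enums were still behind `#![feature(transparent_enums)]` when this landed, so treat the enum case as nightly-only at that point):

```rust
// Accepted: exactly one non-zero-sized field; extra fields must be 1-aligned ZSTs.
#[repr(transparent)]
struct Wrapper(u32, ());

// Accepted on a compiler where transparent enums are allowed: exactly one variant.
#[repr(transparent)]
enum Single {
    Only(u32),
}

// Rejected via `bad_variant_count`:
//
// #[repr(transparent)]
// enum Zero {}                  // error: needs exactly one variant, has none
//
// #[repr(transparent)]
// enum Two { A(u32), B(u32) }   // error: needs exactly one variant, has two

fn main() {
    let Wrapper(x, ()) = Wrapper(7, ());
    let y = match Single::Only(9) {
        Single::Only(v) => v,
    };
    assert_eq!((x, y), (7, 9));
}
```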
25 changes: 25 additions & 0 deletions src/librustc_typeck/check/pat.rs
@@ -1154,6 +1154,16 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
self.tcx.mk_ref(region, mt)
}

/// Type check a slice pattern.
///
/// Syntactically, these look like `[pat_0, ..., pat_n]`.
/// Semantically, we are type checking a pattern with structure:
/// ```
/// [before_0, ..., before_n, (slice, after_0, ..., after_n)?]
/// ```
/// The type of `slice`, if it is present, depends on the `expected` type.
/// If `slice` is missing, then so is `after_i`.
/// If `slice` is present, it can still represent 0 elements.
fn check_pat_slice(
&self,
span: Span,
@@ -1167,27 +1177,39 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let tcx = self.tcx;
let expected_ty = self.structurally_resolved_type(span, expected);
let (inner_ty, slice_ty) = match expected_ty.kind {
// An array, so we might have something like `let [a, b, c] = [0, 1, 2];`.
ty::Array(inner_ty, size) => {
let slice_ty = if let Some(size) = size.try_eval_usize(tcx, self.param_env) {
// Now we know the length...
let min_len = before.len() as u64 + after.len() as u64;
if slice.is_none() {
// ...and since there is no variable-length pattern,
// we require an exact match between the number of elements
// in the array pattern and as provided by the matched type.
if min_len != size {
self.error_scrutinee_inconsistent_length(span, min_len, size)
}
tcx.types.err
} else if let Some(rest) = size.checked_sub(min_len) {
// The variable-length pattern was there,
// so it has an array type with the remaining elements left as its size...
tcx.mk_array(inner_ty, rest)
} else {
// ...however, in this case, there were no remaining elements.
// That is, the slice pattern requires more than the array type offers.
self.error_scrutinee_with_rest_inconsistent_length(span, min_len, size);
tcx.types.err
}
} else {
// No idea what the length is, which happens if we have e.g.,
// `let [a, b] = arr` where `arr: [T; N]` where `const N: usize`.
self.error_scrutinee_unfixed_length(span);
tcx.types.err
};
(inner_ty, slice_ty)
}
ty::Slice(inner_ty) => (inner_ty, expected_ty),
// The expected type must be an array or slice, but was neither, so error.
_ => {
if !expected_ty.references_error() {
self.error_expected_array_or_slice(span, expected_ty);
Expand All @@ -1196,12 +1218,15 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
}
};

// Type check all the patterns before `slice`.
for elt in before {
self.check_pat(&elt, inner_ty, def_bm, discrim_span);
}
// Type check the `slice`, if present, against its expected type.
if let Some(slice) = slice {
self.check_pat(&slice, slice_ty, def_bm, discrim_span);
}
// Type check the elements after `slice`, if present.
for elt in after {
self.check_pat(&elt, inner_ty, def_bm, discrim_span);
}
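In terms of the surface language, `check_pat_slice` is what accepts or rejects code like the following; a hedged sketch of the cases the new comments describe (`rest @ ..` assumes a compiler where subslice patterns are accepted):

```rust
fn main() {
    let arr = [0, 1, 2];

    // Array of known length, no rest pattern: lengths must match exactly.
    let [a, b, c] = arr;
    assert_eq!((a, b, c), (0, 1, 2));

    // With a rest pattern, the rest binding gets the array type of the
    // remaining elements (here `[i32; 2]`); it may also match zero elements.
    let [first, rest @ ..] = arr;
    assert_eq!(first, 0);
    assert_eq!(rest, [1, 2]);

    // Against a slice the length is unknown, so the match must be refutable
    // (or use a rest pattern to cover the unknown remainder).
    let s: &[i32] = &arr;
    if let [x, .., y] = s {
        assert_eq!((*x, *y), (0, 2));
    }

    // `let [a, b] = arr;` (wrong length) and matching on `[T; N]` with a
    // still-generic `N` are the error cases this function reports.
}
```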
2 changes: 1 addition & 1 deletion src/librustdoc/html/static/main.js
@@ -2683,7 +2683,7 @@ function getSearchElement() {
insertAfter(popup, getSearchElement());
}

- onHashChange();
+ onHashChange(null);
window.onhashchange = onHashChange;

buildHelperPopup();