diff --git a/src/compiletest/common.rs b/src/compiletest/common.rs index 202a87fcdc9e7..c29f74d741810 100644 --- a/src/compiletest/common.rs +++ b/src/compiletest/common.rs @@ -43,9 +43,9 @@ impl FromStr for Mode { } } -impl fmt::Show for Mode { +impl fmt::String for Mode { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - let msg = match *self { + fmt::String::fmt(match *self { CompileFail => "compile-fail", RunFail => "run-fail", RunPass => "run-pass", @@ -54,8 +54,13 @@ impl fmt::Show for Mode { DebugInfoGdb => "debuginfo-gdb", DebugInfoLldb => "debuginfo-lldb", Codegen => "codegen", - }; - msg.fmt(f) + }, f) + } +} + +impl fmt::Show for Mode { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::String::fmt(self, f) } } diff --git a/src/compiletest/compiletest.rs b/src/compiletest/compiletest.rs index 0ce31a335d8ab..e2420b0a22024 100644 --- a/src/compiletest/compiletest.rs +++ b/src/compiletest/compiletest.rs @@ -9,21 +9,15 @@ // except according to those terms. #![crate_type = "bin"] -#![feature(phase, slicing_syntax, globs, unboxed_closures)] +#![feature(slicing_syntax, unboxed_closures)] #![deny(warnings)] extern crate test; extern crate getopts; -#[cfg(stage0)] -#[phase(plugin, link)] -extern crate log; - -#[cfg(not(stage0))] #[macro_use] extern crate log; - extern crate regex; use std::os; @@ -108,7 +102,7 @@ pub fn parse_config(args: Vec ) -> Config { let matches = &match getopts::getopts(args_.as_slice(), groups.as_slice()) { Ok(m) => m, - Err(f) => panic!("{}", f) + Err(f) => panic!("{:?}", f) }; if matches.opt_present("h") || matches.opt_present("help") { @@ -127,7 +121,7 @@ pub fn parse_config(args: Vec ) -> Config { match regex::Regex::new(s) { Ok(re) => Some(re), Err(e) => { - println!("failed to parse filter /{}/: {}", s, e); + println!("failed to parse filter /{}/: {:?}", s, e); panic!() } } @@ -186,11 +180,11 @@ pub fn parse_config(args: Vec ) -> Config { pub fn log_config(config: &Config) { let c = config; logv(c, format!("configuration:")); - logv(c, format!("compile_lib_path: {}", config.compile_lib_path)); - logv(c, format!("run_lib_path: {}", config.run_lib_path)); - logv(c, format!("rustc_path: {}", config.rustc_path.display())); - logv(c, format!("src_base: {}", config.src_base.display())); - logv(c, format!("build_base: {}", config.build_base.display())); + logv(c, format!("compile_lib_path: {:?}", config.compile_lib_path)); + logv(c, format!("run_lib_path: {:?}", config.run_lib_path)); + logv(c, format!("rustc_path: {:?}", config.rustc_path.display())); + logv(c, format!("src_base: {:?}", config.src_base.display())); + logv(c, format!("build_base: {:?}", config.build_base.display())); logv(c, format!("stage_id: {}", config.stage_id)); logv(c, format!("mode: {}", config.mode)); logv(c, format!("run_ignored: {}", config.run_ignored)); @@ -206,10 +200,10 @@ pub fn log_config(config: &Config) { logv(c, format!("jit: {}", config.jit)); logv(c, format!("target: {}", config.target)); logv(c, format!("host: {}", config.host)); - logv(c, format!("android-cross-path: {}", + logv(c, format!("android-cross-path: {:?}", config.android_cross_path.display())); - logv(c, format!("adb_path: {}", config.adb_path)); - logv(c, format!("adb_test_dir: {}", config.adb_test_dir)); + logv(c, format!("adb_path: {:?}", config.adb_path)); + logv(c, format!("adb_test_dir: {:?}", config.adb_test_dir)); logv(c, format!("adb_device_status: {}", config.adb_device_status)); match config.test_shard { @@ -271,7 +265,7 @@ pub fn run_tests(config: &Config) { Ok(true) => {} 
Ok(false) => panic!("Some tests failed"), Err(e) => { - println!("I/O failure during tests: {}", e); + println!("I/O failure during tests: {:?}", e); } } } @@ -299,13 +293,13 @@ pub fn test_opts(config: &Config) -> test::TestOpts { } pub fn make_tests(config: &Config) -> Vec { - debug!("making tests from {}", + debug!("making tests from {:?}", config.src_base.display()); let mut tests = Vec::new(); let dirs = fs::readdir(&config.src_base).unwrap(); for file in dirs.iter() { let file = file.clone(); - debug!("inspecting file {}", file.display()); + debug!("inspecting file {:?}", file.display()); if is_test(config, &file) { let t = make_test(config, &file, || { match config.mode { diff --git a/src/compiletest/errors.rs b/src/compiletest/errors.rs index f330bb3143eab..dcfac688c7f62 100644 --- a/src/compiletest/errors.rs +++ b/src/compiletest/errors.rs @@ -84,7 +84,7 @@ fn parse_expected(last_nonfollow_error: Option, (which, line) }; - debug!("line={} which={} kind={} msg={}", line_num, which, kind, msg); + debug!("line={} which={:?} kind={:?} msg={:?}", line_num, which, kind, msg); Some((which, ExpectedError { line: line, kind: kind, msg: msg, })) diff --git a/src/compiletest/runtest.rs b/src/compiletest/runtest.rs index 875061e69b7a2..f8e2ba4828f38 100644 --- a/src/compiletest/runtest.rs +++ b/src/compiletest/runtest.rs @@ -61,7 +61,7 @@ pub fn run_metrics(config: Config, testfile: String, mm: &mut MetricMap) { print!("\n\n"); } let testfile = Path::new(testfile); - debug!("running {}", testfile.display()); + debug!("running {:?}", testfile.display()); let props = header::load_props(&testfile); debug!("loaded props"); match config.mode { @@ -141,7 +141,7 @@ fn check_correct_failure_status(proc_res: &ProcRes) { static RUST_ERR: int = 101; if !proc_res.status.matches_exit_status(RUST_ERR) { fatal_proc_rec( - format!("failure produced the wrong error: {}", + format!("failure produced the wrong error: {:?}", proc_res.status).as_slice(), proc_res); } @@ -410,7 +410,7 @@ fn run_debuginfo_gdb_test(config: &Config, props: &TestProps, testfile: &Path) { ], vec!(("".to_string(), "".to_string())), Some("".to_string())) - .expect(format!("failed to exec `{}`", config.adb_path).as_slice()); + .expect(format!("failed to exec `{:?}`", config.adb_path).as_slice()); procsrv::run("", config.adb_path.as_slice(), @@ -422,7 +422,7 @@ fn run_debuginfo_gdb_test(config: &Config, props: &TestProps, testfile: &Path) { ], vec!(("".to_string(), "".to_string())), Some("".to_string())) - .expect(format!("failed to exec `{}`", config.adb_path).as_slice()); + .expect(format!("failed to exec `{:?}`", config.adb_path).as_slice()); let adb_arg = format!("export LD_LIBRARY_PATH={}; \ gdbserver :5039 {}/{}", @@ -443,11 +443,11 @@ fn run_debuginfo_gdb_test(config: &Config, props: &TestProps, testfile: &Path) { vec!(("".to_string(), "".to_string())), Some("".to_string())) - .expect(format!("failed to exec `{}`", config.adb_path).as_slice()); + .expect(format!("failed to exec `{:?}`", config.adb_path).as_slice()); loop { //waiting 1 second for gdbserver start timer::sleep(Duration::milliseconds(1000)); - let result = Thread::spawn(move || { + let result = Thread::scoped(move || { tcp::TcpStream::connect("127.0.0.1:5039").unwrap(); }).join(); if result.is_err() { @@ -481,7 +481,7 @@ fn run_debuginfo_gdb_test(config: &Config, props: &TestProps, testfile: &Path) { debugger_opts.as_slice(), vec!(("".to_string(), "".to_string())), None) - .expect(format!("failed to exec `{}`", gdb_path).as_slice()); + .expect(format!("failed to exec 
`{:?}`", gdb_path).as_slice()); let cmdline = { let cmdline = make_cmdline("", "arm-linux-androideabi-gdb", @@ -539,18 +539,17 @@ fn run_debuginfo_gdb_test(config: &Config, props: &TestProps, testfile: &Path) { script_str.push_str("set print pretty off\n"); // Add the pretty printer directory to GDB's source-file search path - script_str.push_str(format!("directory {}\n", rust_pp_module_abs_path)[]); + script_str.push_str(&format!("directory {}\n", rust_pp_module_abs_path)[]); // Load the target executable - script_str.push_str(format!("file {}\n", - exe_file.as_str().unwrap().replace("\\", "\\\\")) - .as_slice()); + script_str.push_str(&format!("file {}\n", + exe_file.as_str().unwrap().replace("\\", "\\\\"))[]); // Add line breakpoints for line in breakpoint_lines.iter() { - script_str.push_str(format!("break '{}':{}\n", - testfile.filename_display(), - *line)[]); + script_str.push_str(&format!("break '{:?}':{}\n", + testfile.filename_display(), + *line)[]); } script_str.push_str(cmds.as_slice()); @@ -676,7 +675,7 @@ fn run_debuginfo_lldb_test(config: &Config, props: &TestProps, testfile: &Path) .unwrap() .to_string(); - script_str.push_str(format!("command script import {}\n", rust_pp_module_abs_path[])[]); + script_str.push_str(&format!("command script import {}\n", &rust_pp_module_abs_path[])[]); script_str.push_str("type summary add --no-value "); script_str.push_str("--python-function lldb_rust_formatters.print_val "); script_str.push_str("-x \".*\" --category Rust\n"); @@ -889,7 +888,7 @@ fn check_error_patterns(props: &TestProps, output_to_check: &str, proc_res: &ProcRes) { if props.error_patterns.is_empty() { - fatal(format!("no error pattern specified in {}", + fatal(format!("no error pattern specified in {:?}", testfile.display()).as_slice()); } let mut next_err_idx = 0u; @@ -910,7 +909,7 @@ fn check_error_patterns(props: &TestProps, if done { return; } let missing_patterns = - props.error_patterns[next_err_idx..]; + props.error_patterns.index(&(next_err_idx..)); if missing_patterns.len() == 1u { fatal_proc_rec(format!("error pattern '{}' not found!", missing_patterns[0]).as_slice(), @@ -955,7 +954,7 @@ fn check_expected_errors(expected_errors: Vec , } let prefixes = expected_errors.iter().map(|ee| { - format!("{}:{}:", testfile.display(), ee.line) + format!("{:?}:{}:", testfile.display(), ee.line) }).collect:: >(); #[cfg(windows)] @@ -1191,7 +1190,7 @@ fn compose_and_run_compiler( None); if !auxres.status.success() { fatal_proc_rec( - format!("auxiliary build of {} failed to compile: ", + format!("auxiliary build of {:?} failed to compile: ", abs_ab.display()).as_slice(), &auxres); } @@ -1601,7 +1600,7 @@ fn _arm_push_aux_shared_library(config: &Config, testfile: &Path) { .expect(format!("failed to exec `{}`", config.adb_path).as_slice()); if config.verbose { - println!("push ({}) {} {} {}", + println!("push ({}) {:?} {} {}", config.target, file.display(), copy_result.out, copy_result.err); } diff --git a/src/doc/guide-error-handling.md b/src/doc/guide-error-handling.md index d833827981e27..d241e77f810c7 100644 --- a/src/doc/guide-error-handling.md +++ b/src/doc/guide-error-handling.md @@ -167,10 +167,10 @@ fn parse_version(header: &[u8]) -> Result { let version = parse_version(&[1, 2, 3, 4]); match version { Ok(v) => { - println!("working with version: {}", v); + println!("working with version: {:?}", v); } Err(e) => { - println!("error parsing header: {}", e); + println!("error parsing header: {:?}", e); } } ``` diff --git a/src/doc/guide-macros.md b/src/doc/guide-macros.md 
index 9cb4d154de7b2..95f5305775eed 100644 --- a/src/doc/guide-macros.md +++ b/src/doc/guide-macros.md @@ -42,7 +42,7 @@ the pattern in the above code: # let input_1 = T::SpecialA(0); # let input_2 = T::SpecialA(0); macro_rules! early_return { - ($inp:expr $sp:path) => ( // invoke it like `(input_5 SpecialE)` + ($inp:expr, $sp:path) => ( // invoke it like `(input_5 SpecialE)` match $inp { $sp(x) => { return x; } _ => {} @@ -50,9 +50,9 @@ macro_rules! early_return { ); } // ... -early_return!(input_1 T::SpecialA); +early_return!(input_1, T::SpecialA); // ... -early_return!(input_2 T::SpecialB); +early_return!(input_2, T::SpecialB); # return 0; # } # fn main() {} diff --git a/src/doc/guide-pointers.md b/src/doc/guide-pointers.md index 678e817e2ebbe..14e33ab0f74a9 100644 --- a/src/doc/guide-pointers.md +++ b/src/doc/guide-pointers.md @@ -620,7 +620,7 @@ enum List { fn main() { let list: List = List::Cons(1, box List::Cons(2, box List::Cons(3, box List::Nil))); - println!("{}", list); + println!("{:?}", list); } ``` diff --git a/src/doc/guide-unsafe.md b/src/doc/guide-unsafe.md index bda1b34563208..11bc0bc30f20e 100644 --- a/src/doc/guide-unsafe.md +++ b/src/doc/guide-unsafe.md @@ -703,10 +703,10 @@ Other features provided by lang items include: `deref`, and `add` respectively. - stack unwinding and general failure; the `eh_personality`, `fail` and `fail_bounds_checks` lang items. -- the traits in `std::kinds` used to indicate types that satisfy +- the traits in `std::markers` used to indicate types of various kinds; lang items `send`, `sync` and `copy`. - the marker types and variance indicators found in - `std::kinds::markers`; lang items `covariant_type`, + `std::markers`; lang items `covariant_type`, `contravariant_lifetime`, `no_sync_bound`, etc. Lang items are loaded lazily by the compiler; e.g. if one never uses diff --git a/src/doc/intro.md b/src/doc/intro.md index a4e9d85bffdf8..fbc96a577a41a 100644 --- a/src/doc/intro.md +++ b/src/doc/intro.md @@ -395,7 +395,7 @@ fn main() { for _ in range(0u, 10u) { Thread::spawn(move || { println!("Hello, world!"); - }).detach(); + }); } } ``` @@ -405,8 +405,7 @@ This program creates ten threads, who all print `Hello, world!`. The double bars `||`. (The `move` keyword indicates that the closure takes ownership of any data it uses; we'll have more on the significance of this shortly.) This closure is executed in a new thread created by -`spawn`. The `detach` method means that the child thread is allowed to -outlive its parent. +`spawn`. One common form of problem in concurrent programs is a 'data race.' This occurs when two different threads attempt to access the same @@ -429,7 +428,7 @@ fn main() { for i in range(0u, 3u) { Thread::spawn(move || { for j in range(0, 3) { numbers[j] += 1 } - }).detach(); + }); } } ``` @@ -488,7 +487,7 @@ fn main() { (*array)[i] += 1; println!("numbers[{}] is {}", i, (*array)[i]); - }).detach(); + }); } } ``` diff --git a/src/doc/reference.md b/src/doc/reference.md index 0f1f26d3e711c..a907f096809e6 100644 --- a/src/doc/reference.md +++ b/src/doc/reference.md @@ -690,10 +690,9 @@ balanced, but they are otherwise not special. In the matcher, `$` _name_ `:` _designator_ matches the nonterminal in the Rust syntax named by _designator_. Valid designators are `item`, `block`, `stmt`, -`pat`, `expr`, `ty` (type), `ident`, `path`, `matchers` (lhs of the `=>` in -macro rules), `tt` (rhs of the `=>` in macro rules). 
In the transcriber, the -designator is already known, and so only the name of a matched nonterminal -comes after the dollar sign. +`pat`, `expr`, `ty` (type), `ident`, `path`, `tt` (either side of the `=>` +in macro rules). In the transcriber, the designator is already known, and so +only the name of a matched nonterminal comes after the dollar sign. In both the matcher and transcriber, the Kleene star-like operator indicates repetition. The Kleene star operator consists of `$` and parens, optionally diff --git a/src/etc/gdb_rust_pretty_printing.py b/src/etc/gdb_rust_pretty_printing.py old mode 100644 new mode 100755 index 7e5918ea39e1e..b6770c99975f1 --- a/src/etc/gdb_rust_pretty_printing.py +++ b/src/etc/gdb_rust_pretty_printing.py @@ -51,7 +51,7 @@ def rust_pretty_printer_lookup_function(val): enum_member_count = len(enum_members) if enum_member_count == 0: - return RustStructPrinter(val, false) + return RustStructPrinter(val, False) if enum_member_count == 1: first_variant_name = enum_members[0].name @@ -60,21 +60,27 @@ def rust_pretty_printer_lookup_function(val): return rust_pretty_printer_lookup_function(val[enum_members[0]]) else: assert first_variant_name.startswith("RUST$ENCODED$ENUM$") - # This is a space-optimized enum + # This is a space-optimized enum. + # This means this enum has only two states, and Rust uses one of the + # fields somewhere in the struct to determine which of the two states + # it's in. The location of the field is encoded in the name as something + # like RUST$ENCODED$ENUM$(num$)*name_of_zero_state last_separator_index = first_variant_name.rfind("$") - second_last_separator_index = first_variant_name.rfind("$", 0, last_separator_index) - disr_field_index = first_variant_name[second_last_separator_index + 1 : - last_separator_index] - disr_field_index = int(disr_field_index) + start_index = len("RUST$ENCODED$ENUM$") + disr_field_indices = first_variant_name[start_index : + last_separator_index].split("$") + disr_field_indices = [int(index) for index in disr_field_indices] sole_variant_val = val[enum_members[0]] - disr_field = get_field_at_index(sole_variant_val, disr_field_index) - discriminant = sole_variant_val[disr_field] + discriminant = sole_variant_val + for disr_field_index in disr_field_indices: + disr_field = get_field_at_index(discriminant, disr_field_index) + discriminant = discriminant[disr_field] # If the discriminant field is a fat pointer we have to consider the # first word as the true discriminant if discriminant.type.code == gdb.TYPE_CODE_STRUCT: - discriminant = discriminant[get_field_at_index(discriminant, 0)] + discriminant = discriminant[get_field_at_index(discriminant, 0)] if discriminant == 0: null_variant_name = first_variant_name[last_separator_index + 1:] @@ -234,4 +240,5 @@ def get_field_at_index(val, index): for field in val.type.fields(): if i == index: return field + i += 1 return None diff --git a/src/etc/lldb_rust_formatters.py b/src/etc/lldb_rust_formatters.py index f4f1a5121d195..05d71902904b4 100644 --- a/src/etc/lldb_rust_formatters.py +++ b/src/etc/lldb_rust_formatters.py @@ -79,11 +79,11 @@ def print_struct_val_starting_from(field_start_index, val, internal_dict): has_field_names = type_has_field_names(t) if has_field_names: - template = "%(type_name)s {\n%(body)s\n}" - separator = ", \n" + template = "%(type_name)s {\n%(body)s\n}" + separator = ", \n" else: - template = "%(type_name)s(%(body)s)" - separator = ", " + template = "%(type_name)s(%(body)s)" + separator = ", " if type_name.startswith("("): # this is a 
tuple, so don't print the type name @@ -125,25 +125,25 @@ def print_enum_val(val, internal_dict): if last_separator_index == -1: return "" % first_variant_name - second_last_separator_index = first_variant_name.rfind("$", 0, last_separator_index) - if second_last_separator_index == -1: - return "" % first_variant_name + start_index = len("RUST$ENCODED$ENUM$") - # Extract index of the discriminator field + # Extract indices of the discriminator field try: - disr_field_index = first_variant_name[second_last_separator_index + 1 : - last_separator_index] - disr_field_index = int(disr_field_index) + disr_field_indices = first_variant_name[start_index : + last_separator_index].split("$") + disr_field_indices = [int(index) for index in disr_field_indices] except: return "" % first_variant_name # Read the discriminant - disr_val = val.GetChildAtIndex(0).GetChildAtIndex(disr_field_index) + disr_val = val.GetChildAtIndex(0) + for index in disr_field_indices: + disr_val = disr_val.GetChildAtIndex(index) # If the discriminant field is a fat pointer we have to consider the # first word as the true discriminant if disr_val.GetType().GetTypeClass() == lldb.eTypeClassStruct: - disr_val = disr_val.GetChildAtIndex(0) + disr_val = disr_val.GetChildAtIndex(0) if disr_val.GetValueAsUnsigned() == 0: # Null case: Print the name of the null-variant diff --git a/src/etc/zsh/_rust b/src/etc/zsh/_rust index f4e8f6f7873c9..404f622f970c3 100644 --- a/src/etc/zsh/_rust +++ b/src/etc/zsh/_rust @@ -20,35 +20,37 @@ _rustc_crate_types=( _rustc_emit_types=( 'asm' - 'bc' - 'ir' + 'llvm-bc' + 'llvm-ir' 'obj' 'link' + 'dep-info' ) _rustc_pretty_types=( 'normal[un-annotated source]' 'expanded[crates expanded]' 'typed[crates expanded, with type annotations]' 'identified[fully parenthesized, AST nodes and blocks with IDs]' - 'flowgraph=[graphviz formatted flowgraph for node]:NODEID:' + 'flowgraph[graphviz formatted flowgraph for node]:NODEID:' ) _rustc_color_types=( 'auto[colorize, if output goes to a tty (default)]' 'always[always colorize output]' 'never[never colorize output]' ) +_rustc_info_types=( + 'crate-name[Output the crate name and exit]' + 'file-names[Output the file(s) that would be written if compilation continued and exited]' + 'sysroot[Output the sysroot and exit]' +) _rustc_opts_vals=( --crate-name='[Specify the name of the crate being built]' --crate-type='[Comma separated list of types of crates for the compiler to emit]:TYPES:_values -s "," "Crate types" "$_rustc_crate_types[@]"' --emit='[Comma separated list of types of output for the compiler to emit]:TYPES:_values -s "," "Emit Targets" "$_rustc_emit_types[@]"' - --debuginfo='[Emit DWARF debug info to the objects created]:LEVEL:_values "Debug Levels" "$_rustc_debuginfo_levels[@]"' - --dep-info='[Output dependency info to after compiling]::FILE:_files -/' - --sysroot='[Override the system root]:PATH:_files -/' --cfg='[Configure the compilation environment]:SPEC:' --out-dir='[Write output to compiler-chosen filename in . Ignored if -o is specified. (default the current directory)]:DIR:_files -/' -o'[Write output to . 
Ignored if more than one --emit is specified.]:FILENAME:_files' - --opt-level='[Optimize with possible levels 0-3]:LEVEL:(0 1 2 3)' --pretty='[Pretty-print the input instead of compiling]::TYPE:_values "TYPES" "$_rustc_pretty_types[@]"' -L'[Add a directory to the library search path]:DIR:_files -/' --target='[Target triple cpu-manufacturer-kernel\[-os\] to compile]:TRIPLE:' @@ -56,27 +58,33 @@ _rustc_opts_vals=( {-v,--version}'[Print version info and exit]::VERBOSE:(verbose)' --explain='[Provide a detailed explanation of an error message]:OPT:' --extern'[Specify where an external rust library is located]:ARG:' + --print='[Comma separated list of compiler information to print on stdout]:TYPES:_values -s "," "Compiler Information" "$_rustc_info_types[@]"' ) _rustc_opts_switches=( - -g'[Equivalent to --debuginfo=2]' - {-h,--help}'[Display this message]' - --no-analysis'[Parse and expand the output, but run no analysis or produce output]' - --no-trans'[Run all passes except translation; no output]' - -O'[Equivalent to --opt-level=2]' - --parse-only'[Parse only; do not compile, assemble, or link]' - --print-crate-name'[Output the crate name and exit]' - --print-file-name'[Output the file(s) that would be written if compilation continued and exit]' + -g'[Equivalent to -C debuginfo=2]' + {-h,--help}'[Display the help message]' + {-V,--verbose}'[use verbose output]' + -O'[Equivalent to -C opt-level=2]' --test'[Build a test harness]' ) + + +_rustc_opts_link=( + 'static[Path to the library to link statically]:PATH:_files -/' + 'dylib[Path to the library to link dynamically]:PATH:_files -/' + 'framework[Path to the library to link as a framework]:PATH:_files -/' +) + _rustc_opts_codegen=( - 'ar=[Path to the archive utility to use when assembling archives.]:BIN:_path_files' - 'linker=[Path to the linker utility to use when linking libraries, executables, and objects.]:BIN:_path_files' - 'link-args=[A space-separated list of extra arguments to pass to the linker when the linker is invoked.]:ARGS:' - 'target-cpu=[Selects a target processor. If the value is "help", then a list of available CPUs is printed.]:CPU:' - 'target-feature=[A space-separated list of features to enable or disable for the target. A preceding "+" enables a feature while a preceding "-" disables it. Available features can be discovered through target-cpu=help.]:FEATURE:' - 'passes=[A space-separated list of extra LLVM passes to run. A value of "list" will cause rustc to print all known passes and exit. The passes specified are appended at the end of the normal pass manager.]:LIST:' - 'llvm-args=[A space-separated list of arguments to pass through to LLVM.]:ARGS:' + 'ar[Path to the archive utility to use when assembling archives.]:BIN:_path_files' + 'linker[Path to the linker utility to use when linking libraries, executables, and objects.]:BIN:_path_files' + 'link-args[A space-separated list of extra arguments to pass to the linker when the linker is invoked.]:ARGS:' + 'lto[Perform LLVM link-time optimizations]' + 'target-cpu[Selects a target processor. If the value is "help", then a list of available CPUs is printed.]:CPU:' + 'target-feature[A space-separated list of features to enable or disable for the target. A preceding "+" enables a feature while a preceding "-" disables it. Available features can be discovered through target-cpu=help.]:FEATURE:' + 'passes[A space-separated list of extra LLVM passes to run. A value of "list" will cause rustc to print all known passes and exit. 
The passes specified are appended at the end of the normal pass manager.]:LIST:' + 'llvm-args[A space-separated list of arguments to pass through to LLVM.]:ARGS:' 'save-temps[If specified, the compiler will save more files (.bc, .o, .no-opt.bc) generated throughout compilation in the output directory.]' 'rpath[If specified, then the rpath value for dynamic libraries will be set in either dynamic library or executable outputs.]' 'no-prepopulate-passes[Suppresses pre-population of the LLVM pass manager that is run over the module.]' @@ -86,55 +94,62 @@ _rustc_opts_codegen=( 'prefer-dynamic[Prefers dynamic linking to static linking.]' "no-integrated-as[Force usage of an external assembler rather than LLVM's integrated one.]" 'no-redzone[disable the use of the redzone]' - 'relocation-model=[The relocation model to use. (default: pic)]:MODEL:(pic static dynamic-no-pic)' - 'code-model=[choose the code model to use (llc -code-model for details)]:MODEL:' - 'metadata=[metadata to mangle symbol names with]:VAL:' - 'extra-filenames=[extra data to put in each output filename]:VAL:' - 'codegen-units=[divide crate into N units to optimize in parallel]:N:' + 'relocation-model[The relocation model to use. (default: pic)]:MODEL:(pic static dynamic-no-pic)' + 'code-model[choose the code model to use (llc -code-model for details)]:MODEL:' + 'metadata[metadata to mangle symbol names with]:VAL:' + 'extra-filenames[extra data to put in each output filename]:VAL:' + 'codegen-units[divide crate into N units to optimize in parallel]:N:' + 'remark[print remarks for these optimization passes (space separated, or "all")]:TYPE:' + 'debuginfo[debug info emission level, 0 = no debug info, 1 = line tables only, 2 = full debug info with variable and type information]:LEVEL:_values "Debug Levels" "$_rustc_debuginfo_levels[@]"' + 'opt-level[Optimize with possible levels 0-3]:LEVEL:(0 1 2 3)' 'help[Show all codegen options]' ) _rustc_opts_lint=( 'help[Show a list of all lints]' - 'experimental[detects use of #\[experimental\] items]' - 'heap-memory[use of any (Box type or @ type) heap memory]' - 'managed-heap-memory[use of managed (@ type) heap memory]' - 'missing-doc[detects missing documentation for public members]' - 'non-uppercase-statics[static constants should have uppercase identifiers]' - 'owned-heap-memory[use of owned (~ type) heap memory]' - 'unnecessary-qualification[detects unnecessarily qualified names]' - 'unsafe-block[usage of an `unsafe` block]' - 'unstable[detects use of #\[unstable\] items (incl. 
items with no stability attribute)]' - 'unused-result[unused result of an expression in a statement]' - 'variant-size-difference[detects enums with widely varying variant sizes]' - 'ctypes[proper use of libc types in foreign modules]' - 'dead-assignment[detect assignments that will never be read]' - 'dead-code[detect piece of code that will never be used]' - 'deprecated[detects use of #\[deprecated\] items]' - 'non-camel-case-types[types, variants and traits should have camel case names]' - 'non-snake-case[methods, functions, lifetime parameters and modules should have snake case names]' - 'path-statement[path statements with no effect]' - 'raw-pointer-deriving[uses of #\[deriving\] with raw pointers are rarely correct]' - 'type-limits[comparisons made useless by limits of the types involved]' - 'type-overflow[literal out of range for its type]' - 'unnecessary-allocation[detects unnecessary allocations that can be eliminated]' - 'unnecessary-parens[`if`, `match`, `while` and `return` do not need parentheses]' - 'unreachable-code[detects unreachable code]' - 'unrecognized-lint[unrecognized lint attribute]' - 'unsigned-negate[using an unary minus operator on unsigned type]' - 'unused-attribute[detects attributes that were not used by the compiler]' - 'unused-imports[imports that are never used]' - 'unused-must-use[unused result of a type flagged as #\[must_use\]]' - "unused-mut[detect mut variables which don't need to be mutable]" - 'unused-unsafe[unnecessary use of an `unsafe` block]' - 'unused-variable[detect variables which are not used in any way]' - 'visible-private-types[detect use of private types in exported type signatures]' - 'warnings[mass-change the level for lints which produce warnings]' - 'while-true[suggest using `loop { }` instead of `while true { }`]' - 'unknown-crate-type[unknown crate type found in #\[crate_type\] directive]' - 'unknown-features[unknown features found in crate-level #\[feature\] directives]' - 'bad-style[group of non_camel_case_types, non_snake_case, non_uppercase_statics]' - 'unused[group of unused_imports, unused_variable, dead_assignment, dead_code, unused_mut, unreachable_code]' + 'box-pointers[(default: allow) use of owned (Box type) heap memory]' + 'experimental[(default: allow) detects use of #\[experimental\] items]' + 'fat-ptr-transmutes[(default: allow) detects transmutes of fat pointers]' + 'missing-docs[(default: allow) detects missing documentation for public members]' + 'unsafe-blocks[(default: allow) usage of an "unsafe" block]' + 'unstable[(default: allow) detects use of #\[unstable\] items (incl. 
items with no stability attribute)]' + 'unused-extern-crates[(default: allow) extern crates that are never used]' + 'unused-import-braces[(default: allow) unnecessary braces around an imported item]' + 'unused-qualifications[(default: allow) detects unnecessarily qualified names]' + 'unused-results[(default: allow) unused result of an expression in a statement]' + 'unused-typecasts[(default: allow) detects unnecessary type casts that can be removed]' + 'variant-size-differences[(default: allow) detects enums with widely varying variant sizes]' + 'dead-code[(default: warn) detect unused, unexported items]' + 'deprecated[(default: warn) detects use of #\[deprecated\] items]' + 'improper-ctypes[(default: warn) proper use of libc types in foreign modules]' + 'missing-copy-implementations[(default: warn) detects potentially-forgotten implementations of "Copy"]' + 'non-camel-case-types[(default: warn) types, variants, traits and type parameters should have camel case names]' + 'non-shorthand-field-patterns[(default: warn) using "Struct { x: x }" instead of "Struct { x }"]' + 'non-snake-case[(default: warn) methods, functions, lifetime parameters and modules should have snake case names]' + 'non-upper-case-globals[(default: warn) static constants should have uppercase identifiers]' + 'overflowing-literals[(default: warn) literal out of range for its type]' + 'path-statements[(default: warn) path statements with no effect]' + 'raw-pointer-deriving[(default: warn) uses of #\[derive\] with raw pointers are rarely correct]' + 'unknown-lints[(default: warn) unrecognized lint attribute]' + 'unreachable-code[(default: warn) detects unreachable code paths]' + 'unsigned-negation[(default: warn) using an unary minus operator on unsigned type]' + 'unused-allocation[(default: warn) detects unnecessary allocations that can be eliminated]' + 'unused-assignments[(default: warn) detect assignments that will never be read]' + 'unused-attributes[(default: warn) detects attributes that were not used by the compiler]' + 'unused-comparisons[(default: warn) comparisons made useless by limits of the types involved]' + 'unused-imports[(default: warn) imports that are never used]' + 'unused-must-use[(default: warn) unused result of a type flagged as must_use]' + "unused-mut[(default: warn) detect mut variables which don't need to be mutable]" + 'unused-parens[(default: warn) "if", "match", "while" and "return" do not need parentheses]' + 'unused-unsafe[(default: warn) unnecessary use of an "unsafe" block]' + 'unused-variables[(default: warn) detect variables which are not used in any way]' + 'warnings[(default: warn) mass-change the level for lints which produce warnings]' + 'while-true[(default: warn) suggest using "loop { }" instead of "while true { }"]' + "exceeding-bitshifts[(default: deny) shift exceeds the type's number of bits]" + 'unknown-crate-types[(default: deny) unknown crate type found in #\[crate_type\] directive]' + 'unknown-features[(default: deny) unknown features found in crate-level #\[feature\] directives]' + 'bad-style[non-camel-case-types, non-snake-case, non-upper-case-globals]' + 'unused[unused-imports, unused-variables, unused-assignments, dead-code, unused-mut, unreachable-code, unused-must-use, unused-unsafe, path-statements]' ) _rustc_opts_debug=( @@ -151,11 +166,9 @@ _rustc_opts_debug=( 'show-span[show spans for compiler debugging]' 'count-type-sizes[count the sizes of aggregate types]' 'meta-stats[gather metadata statistics]' - 'no-opt[do not optimize, even if -O is passed]' 
'print-link-args[Print the arguments passed to the linker]' 'gc[Garbage collect shared data (experimental)]' 'print-llvm-passes[Prints the llvm optimization passes being run]' - 'lto[Perform LLVM link-time optimizations]' 'ast-json[Print the AST as JSON and halt]' 'ast-json-noexpand[Print the pre-expansion AST as JSON and halt]' 'ls[List the symbols defined by a library crate]' @@ -164,6 +177,12 @@ 'flowgraph-print-moves[Include move analysis data in --pretty flowgraph output]' 'flowgraph-print-assigns[Include assignment analysis data in --pretty flowgraph output]' 'flowgraph-print-all[Include all dataflow analysis data in --pretty flowgraph output]' + 'print-region-graph[Prints region inference graph. Use with RUST_REGION_GRAPH=help for more info]' + 'parse-only[Parse only; do not compile, assemble, or link]' + 'no-trans[Run all passes except translation; no output]' + 'no-analysis[Parse and expand the source, but run no analysis]' + 'unstable-options[Adds unstable command line options to rustc interface]' + 'print-enum-sizes[Print the size of enums and their variants]' ) _rustc_opts_fun_lint(){ @@ -179,13 +198,18 @@ _rustc_opts_fun_codegen(){ _values 'options' "$_rustc_opts_codegen[@]" } +_rustc_opts_fun_link(){ + _values 'options' "$_rustc_opts_link[@]" +} + _arguments -s : \ '(-W --warn)'{-W,--warn=}'[Set lint warnings]:lint options:_rustc_opts_fun_lint' \ '(-A --allow)'{-A,--allow=}'[Set lint allowed]:lint options:_rustc_opts_fun_lint' \ '(-D --deny)'{-D,--deny=}'[Set lint denied]:lint options:_rustc_opts_fun_lint' \ '(-F --forbid)'{-F,--forbid=}'[Set lint forbidden]:lint options:_rustc_opts_fun_lint' \ '*-Z[Set internal debugging options]:debug options:_rustc_opts_fun_debug' \ - '*-C[Set internal Codegen options]:codegen options:_rustc_opts_fun_codegen' \ + '(-C --codegen)'{-C,--codegen}'[Set internal Codegen options]:codegen options:_rustc_opts_fun_codegen' \ + '*-l[Link the generated crates to the specified native library NAME. the optional KIND can be one of, static, dylib, or framework. If omitted, dylib is assumed.]:ARG:_rustc_opts_fun_link' \ "$_rustc_opts_switches[@]" \ "$_rustc_opts_vals[@]" \ '::files:_files -g "*.rs"' diff --git a/src/liballoc/arc.rs b/src/liballoc/arc.rs index 25f80ad11bd11..8def8ad721528 100644 --- a/src/liballoc/arc.rs +++ b/src/liballoc/arc.rs @@ -41,8 +41,8 @@ //! let five = five.clone(); //! //! Thread::spawn(move || { -//! println!("{}", five); -//! }).detach(); +//! println!("{:?}", five); +//! }); //! } //! ``` //! @@ -63,7 +63,7 @@ //! *number += 1; //! //! println!("{}", *number); // prints 6 -//! }).detach(); +//! }); //! } //! 
``` @@ -74,7 +74,7 @@ use core::clone::Clone; use core::fmt::{self, Show}; use core::cmp::{Eq, Ord, PartialEq, PartialOrd, Ordering}; use core::default::Default; -use core::kinds::{Sync, Send}; +use core::marker::{Sync, Send}; use core::mem::{min_align_of, size_of, drop}; use core::mem; use core::nonzero::NonZero; @@ -106,7 +106,7 @@ use heap::deallocate; /// let local_numbers = child_numbers.as_slice(); /// /// // Work with the local numbers -/// }).detach(); +/// }); /// } /// } /// ``` @@ -581,7 +581,7 @@ impl Eq for Arc {} impl fmt::Show for Arc { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - (**self).fmt(f) + write!(f, "Arc({:?})", (**self)) } } @@ -794,7 +794,7 @@ mod tests { #[test] fn show_arc() { let a = Arc::new(5u32); - assert!(format!("{}", a) == "5") + assert!(format!("{:?}", a) == "Arc(5u32)") } // Make sure deriving works with Arc diff --git a/src/liballoc/boxed.rs b/src/liballoc/boxed.rs index 6df8bb5f7aaf4..d46f18abf97b4 100644 --- a/src/liballoc/boxed.rs +++ b/src/liballoc/boxed.rs @@ -18,7 +18,7 @@ use core::cmp::{PartialEq, PartialOrd, Eq, Ord, Ordering}; use core::default::Default; use core::fmt; use core::hash::{self, Hash}; -use core::kinds::Sized; +use core::marker::Sized; use core::mem; use core::option::Option; use core::ptr::Unique; @@ -145,7 +145,13 @@ impl BoxAny for Box { impl fmt::Show for Box { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - (**self).fmt(f) + write!(f, "Box({:?})", &**self) + } +} + +impl fmt::String for Box { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::String::fmt(&**self, f) } } diff --git a/src/liballoc/lib.rs b/src/liballoc/lib.rs index 001e02f9c0dd5..ba6e89cdd768e 100644 --- a/src/liballoc/lib.rs +++ b/src/liballoc/lib.rs @@ -65,36 +65,16 @@ #![no_std] #![allow(unknown_features)] -#![feature(lang_items, phase, unsafe_destructor, default_type_params, old_orphan_check)] -#![feature(associated_types)] +#![feature(lang_items, unsafe_destructor)] -#[cfg(stage0)] -#[phase(plugin, link)] -extern crate core; - -#[cfg(not(stage0))] #[macro_use] extern crate core; - extern crate libc; // Allow testing this library -#[cfg(all(test, stage0))] -#[phase(plugin, link)] -extern crate std; - -#[cfg(all(test, not(stage0)))] -#[macro_use] -extern crate std; - -#[cfg(all(test, stage0))] -#[phase(plugin, link)] -extern crate log; - -#[cfg(all(test, not(stage0)))] -#[macro_use] -extern crate log; +#[cfg(test)] #[macro_use] extern crate std; +#[cfg(test)] #[macro_use] extern crate log; // Heaps provided for low-level allocation strategies diff --git a/src/liballoc/rc.rs b/src/liballoc/rc.rs index 175bba4e71dc4..67b2542771012 100644 --- a/src/liballoc/rc.rs +++ b/src/liballoc/rc.rs @@ -148,7 +148,7 @@ use core::cmp::{PartialEq, PartialOrd, Eq, Ord, Ordering}; use core::default::Default; use core::fmt; use core::hash::{self, Hash}; -use core::kinds::marker; +use core::marker; use core::mem::{transmute, min_align_of, size_of, forget}; use core::nonzero::NonZero; use core::ops::{Deref, Drop}; @@ -607,7 +607,7 @@ impl> Hash for Rc { #[experimental = "Show is experimental."] impl fmt::Show for Rc { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - (**self).fmt(f) + write!(f, "Rc({:?})", **self) } } @@ -962,4 +962,10 @@ mod tests { assert!(cow1_weak.upgrade().is_none()); } + #[test] + fn test_show() { + let foo = Rc::new(75u); + assert!(format!("{:?}", foo) == "Rc(75u)") + } + } diff --git a/src/libcollections/bit.rs b/src/libcollections/bit.rs index c092e000215d3..2154d06377a19 100644 --- a/src/libcollections/bit.rs 
+++ b/src/libcollections/bit.rs @@ -143,17 +143,17 @@ static FALSE: bool = false; /// bv.set(3, true); /// bv.set(5, true); /// bv.set(7, true); -/// println!("{}", bv.to_string()); +/// println!("{:?}", bv); /// println!("total bits set to true: {}", bv.iter().filter(|x| *x).count()); /// /// // flip all values in bitvector, producing non-primes less than 10 /// bv.negate(); -/// println!("{}", bv.to_string()); +/// println!("{:?}", bv); /// println!("total bits set to true: {}", bv.iter().filter(|x| *x).count()); /// /// // reset bitvector to empty /// bv.clear(); -/// println!("{}", bv.to_string()); +/// println!("{:?}", bv); /// println!("total bits set to true: {}", bv.iter().filter(|x| *x).count()); /// ``` #[stable] @@ -330,7 +330,7 @@ impl Bitv { if extra_bytes > 0 { let mut last_word = 0u32; - for (i, &byte) in bytes[complete_words*4..].iter().enumerate() { + for (i, &byte) in bytes.index(&((complete_words*4)..)).iter().enumerate() { last_word |= (reverse_bits(byte) as u32) << (i * 8); } bitv.storage.push(last_word); @@ -1729,13 +1729,13 @@ impl BitvSet { impl fmt::Show for BitvSet { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - try!(write!(fmt, "{{")); + try!(write!(fmt, "BitvSet {{")); let mut first = true; for n in self.iter() { if !first { try!(write!(fmt, ", ")); } - try!(write!(fmt, "{}", n)); + try!(write!(fmt, "{:?}", n)); first = false; } write!(fmt, "}}") @@ -1881,10 +1881,10 @@ mod tests { #[test] fn test_to_str() { let zerolen = Bitv::new(); - assert_eq!(zerolen.to_string(), ""); + assert_eq!(format!("{:?}", zerolen), ""); let eightbits = Bitv::from_elem(8u, false); - assert_eq!(eightbits.to_string(), "00000000") + assert_eq!(format!("{:?}", eightbits), "00000000") } #[test] @@ -1910,7 +1910,7 @@ mod tests { let mut b = Bitv::from_elem(2, false); b.set(0, true); b.set(1, false); - assert_eq!(b.to_string(), "10"); + assert_eq!(format!("{:?}", b), "10"); assert!(!b.none() && !b.all()); } @@ -2245,7 +2245,7 @@ mod tests { fn test_from_bytes() { let bitv = Bitv::from_bytes(&[0b10110110, 0b00000000, 0b11111111]); let str = concat!("10110110", "00000000", "11111111"); - assert_eq!(bitv.to_string(), str); + assert_eq!(format!("{:?}", bitv), str); } #[test] @@ -2264,7 +2264,7 @@ mod tests { fn test_from_bools() { let bools = vec![true, false, true, true]; let bitv: Bitv = bools.iter().map(|n| *n).collect(); - assert_eq!(bitv.to_string(), "1011"); + assert_eq!(format!("{:?}", bitv), "1011"); } #[test] @@ -2622,7 +2622,7 @@ mod bitv_set_test { s.insert(10); s.insert(50); s.insert(2); - assert_eq!("{1, 2, 10, 50}", s.to_string()); + assert_eq!("BitvSet {1u, 2u, 10u, 50u}", format!("{:?}", s)); } #[test] diff --git a/src/libcollections/btree/map.rs b/src/libcollections/btree/map.rs index b85ea65f5ce58..4e44779810b29 100644 --- a/src/libcollections/btree/map.rs +++ b/src/libcollections/btree/map.rs @@ -19,7 +19,7 @@ pub use self::Entry::*; use core::prelude::*; -use core::borrow::{BorrowFrom, ToOwned}; +use core::borrow::BorrowFrom; use core::cmp::Ordering; use core::default::Default; use core::fmt::Show; @@ -128,24 +128,24 @@ pub struct Values<'a, K: 'a, V: 'a> { inner: Map<(&'a K, &'a V), &'a V, Iter<'a, K, V>, fn((&'a K, &'a V)) -> &'a V> } -#[stable] /// A view into a single entry in a map, which may either be vacant or occupied. 
-pub enum Entry<'a, Q: ?Sized +'a, K:'a, V:'a> { +#[unstable = "precise API still under development"] +pub enum Entry<'a, K:'a, V:'a> { /// A vacant Entry - Vacant(VacantEntry<'a, Q, K, V>), + Vacant(VacantEntry<'a, K, V>), /// An occupied Entry Occupied(OccupiedEntry<'a, K, V>), } -#[stable] /// A vacant Entry. -pub struct VacantEntry<'a, Q: ?Sized +'a, K:'a, V:'a> { - key: &'a Q, +#[unstable = "precise API still under development"] +pub struct VacantEntry<'a, K:'a, V:'a> { + key: K, stack: stack::SearchStack<'a, K, V, node::handle::Edge, node::handle::Leaf>, } -#[stable] /// An occupied Entry. +#[unstable = "precise API still under development"] pub struct OccupiedEntry<'a, K:'a, V:'a> { stack: stack::SearchStack<'a, K, V, node::handle::KV, node::handle::LeafOrInternal>, } @@ -480,7 +480,7 @@ enum Continuation { /// boilerplate gets cut out. mod stack { use core::prelude::*; - use core::kinds::marker; + use core::marker; use core::mem; use core::ops::{Deref, DerefMut}; use super::BTreeMap; @@ -866,11 +866,11 @@ impl Ord for BTreeMap { #[stable] impl Show for BTreeMap { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - try!(write!(f, "{{")); + try!(write!(f, "BTreeMap {{")); for (i, (k, v)) in self.iter().enumerate() { if i != 0 { try!(write!(f, ", ")); } - try!(write!(f, "{}: {}", *k, *v)); + try!(write!(f, "{:?}: {:?}", *k, *v)); } write!(f, "}}") @@ -933,7 +933,7 @@ enum StackOp { } impl Iterator for AbsIter where - T: DoubleEndedIterator + Iterator> + Traverse, + T: DoubleEndedIterator> + Traverse, { type Item = (K, V); @@ -1002,7 +1002,7 @@ impl Iterator for AbsIter where } impl DoubleEndedIterator for AbsIter where - T: DoubleEndedIterator + Iterator> + Traverse, + T: DoubleEndedIterator> + Traverse, { // next_back is totally symmetric to next fn next_back(&mut self) -> Option<(K, V)> { @@ -1111,10 +1111,10 @@ impl<'a, K, V> DoubleEndedIterator for Values<'a, K, V> { #[stable] impl<'a, K, V> ExactSizeIterator for Values<'a, K, V> {} -impl<'a, Q: ?Sized, K: Ord, V> Entry<'a, Q, K, V> { +impl<'a, K: Ord, V> Entry<'a, K, V> { #[unstable = "matches collection reform v2 specification, waiting for dust to settle"] /// Returns a mutable reference to the entry if occupied, or the VacantEntry if vacant - pub fn get(self) -> Result<&'a mut V, VacantEntry<'a, Q, K, V>> { + pub fn get(self) -> Result<&'a mut V, VacantEntry<'a, K, V>> { match self { Occupied(entry) => Ok(entry.into_mut()), Vacant(entry) => Err(entry), @@ -1122,44 +1122,44 @@ impl<'a, Q: ?Sized, K: Ord, V> Entry<'a, Q, K, V> { } } -impl<'a, Q: ?Sized + ToOwned, K: Ord, V> VacantEntry<'a, Q, K, V> { - #[stable] +impl<'a, K: Ord, V> VacantEntry<'a, K, V> { /// Sets the value of the entry with the VacantEntry's key, /// and returns a mutable reference to it. + #[unstable = "matches collection reform v2 specification, waiting for dust to settle"] pub fn insert(self, value: V) -> &'a mut V { - self.stack.insert(self.key.to_owned(), value) + self.stack.insert(self.key, value) } } impl<'a, K: Ord, V> OccupiedEntry<'a, K, V> { - #[stable] /// Gets a reference to the value in the entry. + #[unstable = "matches collection reform v2 specification, waiting for dust to settle"] pub fn get(&self) -> &V { self.stack.peek() } - #[stable] /// Gets a mutable reference to the value in the entry. + #[unstable = "matches collection reform v2 specification, waiting for dust to settle"] pub fn get_mut(&mut self) -> &mut V { self.stack.peek_mut() } - #[stable] /// Converts the entry into a mutable reference to its value. 
+ #[unstable = "matches collection reform v2 specification, waiting for dust to settle"] pub fn into_mut(self) -> &'a mut V { self.stack.into_top() } - #[stable] /// Sets the value of the entry with the OccupiedEntry's key, /// and returns the entry's old value. + #[unstable = "matches collection reform v2 specification, waiting for dust to settle"] pub fn insert(&mut self, mut value: V) -> V { mem::swap(self.stack.peek_mut(), &mut value); value } - #[stable] /// Takes the value of the entry out of the map, and returns it. + #[unstable = "matches collection reform v2 specification, waiting for dust to settle"] pub fn remove(self) -> V { self.stack.remove() } @@ -1347,7 +1347,7 @@ impl BTreeMap { /// /// // count the number of occurrences of letters in the vec /// for x in vec!["a","b","a","c","a","b"].iter() { - /// match count.entry(x) { + /// match count.entry(*x) { /// Entry::Vacant(view) => { /// view.insert(1); /// }, @@ -1361,15 +1361,13 @@ impl BTreeMap { /// assert_eq!(count["a"], 3u); /// ``` /// The key must have the same ordering before or after `.to_owned()` is called. - #[stable] - pub fn entry<'a, Q: ?Sized>(&'a mut self, mut key: &'a Q) -> Entry<'a, Q, K, V> - where Q: Ord + ToOwned - { + #[unstable = "precise API still under development"] + pub fn entry<'a>(&'a mut self, mut key: K) -> Entry<'a, K, V> { // same basic logic of `swap` and `pop`, blended together let mut stack = stack::PartialSearchStack::new(self); loop { let result = stack.with(move |pusher, node| { - return match Node::search(node, key) { + return match Node::search(node, &key) { Found(handle) => { // Perfect match Finished(Occupied(OccupiedEntry { @@ -1412,7 +1410,7 @@ impl BTreeMap { #[cfg(test)] mod test { use prelude::*; - use std::borrow::{ToOwned, BorrowFrom}; + use std::borrow::BorrowFrom; use super::{BTreeMap, Occupied, Vacant}; @@ -1562,7 +1560,7 @@ mod test { let mut map: BTreeMap = xs.iter().map(|&x| x).collect(); // Existing key (insert) - match map.entry(&1) { + match map.entry(1) { Vacant(_) => unreachable!(), Occupied(mut view) => { assert_eq!(view.get(), &10); @@ -1574,7 +1572,7 @@ mod test { // Existing key (update) - match map.entry(&2) { + match map.entry(2) { Vacant(_) => unreachable!(), Occupied(mut view) => { let v = view.get_mut(); @@ -1585,7 +1583,7 @@ mod test { assert_eq!(map.len(), 6); // Existing key (take) - match map.entry(&3) { + match map.entry(3) { Vacant(_) => unreachable!(), Occupied(view) => { assert_eq!(view.remove(), 30); @@ -1596,7 +1594,7 @@ mod test { // Inexistent key (insert) - match map.entry(&10) { + match map.entry(10) { Occupied(_) => unreachable!(), Vacant(view) => { assert_eq!(*view.insert(1000), 1000); diff --git a/src/libcollections/btree/node.rs b/src/libcollections/btree/node.rs index 0a93bbf89c997..82d8dc286ee45 100644 --- a/src/libcollections/btree/node.rs +++ b/src/libcollections/btree/node.rs @@ -493,7 +493,7 @@ impl Clone for Node { /// // Now the handle still points at index 75, but on the small node, which has no index 75. 
/// flag.set(true); /// -/// println!("Uninitialized memory: {}", handle.into_kv()); +/// println!("Uninitialized memory: {:?}", handle.into_kv()); /// } /// ``` #[derive(Copy)] @@ -1417,7 +1417,7 @@ pub type MutTraversal<'a, K, V> = AbsTraversal = AbsTraversal>; - +#[old_impl_check] impl> Iterator for AbsTraversal { type Item = TraversalItem; @@ -1433,6 +1433,7 @@ impl> Iterator for AbsTraversal { } } +#[old_impl_check] impl> DoubleEndedIterator for AbsTraversal { fn next_back(&mut self) -> Option> { let tail_is_edge = self.tail_is_edge; diff --git a/src/libcollections/btree/set.rs b/src/libcollections/btree/set.rs index 98f1633217060..25df4a3cc2a6f 100644 --- a/src/libcollections/btree/set.rs +++ b/src/libcollections/btree/set.rs @@ -556,11 +556,11 @@ impl<'a, 'b, T: Ord + Clone> BitOr<&'b BTreeSet> for &'a BTreeSet { #[stable] impl Show for BTreeSet { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - try!(write!(f, "{{")); + try!(write!(f, "BTreeSet {{")); for (i, x) in self.iter().enumerate() { if i != 0 { try!(write!(f, ", ")); } - try!(write!(f, "{}", *x)); + try!(write!(f, "{:?}", *x)); } write!(f, "}}") @@ -842,9 +842,9 @@ mod test { set.insert(1); set.insert(2); - let set_str = format!("{}", set); + let set_str = format!("{:?}", set); - assert!(set_str == "{1, 2}"); - assert_eq!(format!("{}", empty), "{}"); + assert_eq!(set_str, "BTreeSet {1i, 2i}"); + assert_eq!(format!("{:?}", empty), "BTreeSet {}"); } } diff --git a/src/libcollections/dlist.rs b/src/libcollections/dlist.rs index 5e08f90ce1c53..63ea9f7cb4322 100644 --- a/src/libcollections/dlist.rs +++ b/src/libcollections/dlist.rs @@ -663,11 +663,11 @@ impl Clone for DList { #[stable] impl fmt::Show for DList { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - try!(write!(f, "[")); + try!(write!(f, "DList [")); for (i, e) in self.iter().enumerate() { if i != 0 { try!(write!(f, ", ")); } - try!(write!(f, "{}", *e)); + try!(write!(f, "{:?}", *e)); } write!(f, "]") @@ -924,7 +924,7 @@ mod tests { #[test] fn test_send() { let n = list_from(&[1i,2,3]); - Thread::spawn(move || { + Thread::scoped(move || { check_links(&n); let a: &[_] = &[&1,&2,&3]; assert_eq!(a, n.iter().collect::>()); @@ -1018,12 +1018,12 @@ mod tests { #[test] fn test_show() { let list: DList = range(0i, 10).collect(); - assert!(list.to_string() == "[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]"); + assert_eq!(format!("{:?}", list), "DList [0i, 1i, 2i, 3i, 4i, 5i, 6i, 7i, 8i, 9i]"); let list: DList<&str> = vec!["just", "one", "test", "more"].iter() .map(|&s| s) .collect(); - assert!(list.to_string() == "[just, one, test, more]"); + assert_eq!(format!("{:?}", list), "DList [\"just\", \"one\", \"test\", \"more\"]"); } #[cfg(test)] diff --git a/src/libcollections/enum_set.rs b/src/libcollections/enum_set.rs index 4b94348e87ae3..1b852d0ba680d 100644 --- a/src/libcollections/enum_set.rs +++ b/src/libcollections/enum_set.rs @@ -33,13 +33,13 @@ impl Copy for EnumSet {} impl fmt::Show for EnumSet { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - try!(write!(fmt, "{{")); + try!(write!(fmt, "EnumSet {{")); let mut first = true; for e in self.iter() { if !first { try!(write!(fmt, ", ")); } - try!(write!(fmt, "{}", e)); + try!(write!(fmt, "{:?}", e)); first = false; } write!(fmt, "}}") @@ -287,11 +287,11 @@ mod test { #[test] fn test_show() { let mut e = EnumSet::new(); - assert_eq!("{}", e.to_string()); + assert!(format!("{:?}", e) == "EnumSet {}"); e.insert(A); - assert_eq!("{A}", e.to_string()); + assert!(format!("{:?}", e) == "EnumSet {A}"); e.insert(C); - 
assert_eq!("{A, C}", e.to_string()); + assert!(format!("{:?}", e) == "EnumSet {A, C}"); } #[test] diff --git a/src/libcollections/lib.rs b/src/libcollections/lib.rs index 5bf5f78af94c2..6eab36d8844df 100644 --- a/src/libcollections/lib.rs +++ b/src/libcollections/lib.rs @@ -22,18 +22,11 @@ html_playground_url = "http://play.rust-lang.org/")] #![allow(unknown_features)] -#![feature(macro_rules, default_type_params, phase, globs)] #![feature(unsafe_destructor, slicing_syntax)] +#![feature(old_impl_check)] #![feature(unboxed_closures)] -#![feature(old_orphan_check)] -#![feature(associated_types)] #![no_std] -#[cfg(stage0)] -#[phase(plugin, link)] -extern crate core; - -#[cfg(not(stage0))] #[macro_use] extern crate core; @@ -41,22 +34,8 @@ extern crate unicode; extern crate alloc; #[cfg(test)] extern crate test; - -#[cfg(all(test, stage0))] -#[phase(plugin, link)] -extern crate std; - -#[cfg(all(test, not(stage0)))] -#[macro_use] -extern crate std; - -#[cfg(all(test, stage0))] -#[phase(plugin, link)] -extern crate log; - -#[cfg(all(test, not(stage0)))] -#[macro_use] -extern crate log; +#[cfg(test)] #[macro_use] extern crate std; +#[cfg(test)] #[macro_use] extern crate log; pub use binary_heap::BinaryHeap; pub use bitv::Bitv; @@ -73,8 +52,7 @@ pub use vec_map::VecMap; // Needed for the vec! macro pub use alloc::boxed; -#[cfg_attr(stage0, macro_escape)] -#[cfg_attr(not(stage0), macro_use)] +#[macro_use] mod macros; pub mod binary_heap; @@ -123,7 +101,9 @@ mod std { pub use core::option; // necessary for panic!() pub use core::clone; // deriving(Clone) pub use core::cmp; // deriving(Eq, Ord, etc.) - pub use core::kinds; // deriving(Copy) + #[cfg(stage0)] + pub use core::marker as kinds; + pub use core::marker; // deriving(Copy) pub use core::hash; // deriving(Hash) } @@ -138,7 +118,7 @@ mod prelude { pub use core::iter::{FromIterator, Extend, IteratorExt}; pub use core::iter::{Iterator, DoubleEndedIterator, RandomAccessIterator}; pub use core::iter::{ExactSizeIterator}; - pub use core::kinds::{Copy, Send, Sized, Sync}; + pub use core::marker::{Copy, Send, Sized, Sync}; pub use core::mem::drop; pub use core::ops::{Drop, Fn, FnMut, FnOnce}; pub use core::option::Option; diff --git a/src/libcollections/macros.rs b/src/libcollections/macros.rs index 0c5929e8661d6..68e2482964dbb 100644 --- a/src/libcollections/macros.rs +++ b/src/libcollections/macros.rs @@ -8,21 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -/// Creates a `std::vec::Vec` containing the arguments. -// NOTE: remove after the next snapshot -#[cfg(stage0)] -macro_rules! vec { - ($($e:expr),*) => ({ - // leading _ to allow empty construction without a warning. - let mut _temp = ::vec::Vec::new(); - $(_temp.push($e);)* - _temp - }); - ($($e:expr),+,) => (vec!($($e),+)) -} - /// Creates a `Vec` containing the arguments. -#[cfg(not(stage0))] #[macro_export] macro_rules! 
vec { ($($x:expr),*) => ({ diff --git a/src/libcollections/ring_buf.rs b/src/libcollections/ring_buf.rs index 11775f62b1c54..42c17136a0882 100644 --- a/src/libcollections/ring_buf.rs +++ b/src/libcollections/ring_buf.rs @@ -20,7 +20,7 @@ use core::cmp::Ordering; use core::default::Default; use core::fmt; use core::iter::{self, repeat, FromIterator, RandomAccessIterator}; -use core::kinds::marker; +use core::marker; use core::mem; use core::num::{Int, UnsignedInt}; use core::ops::{Index, IndexMut}; @@ -525,7 +525,7 @@ impl RingBuf { /// *num = *num - 2; /// } /// let b: &[_] = &[&mut 3, &mut 1, &mut 2]; - /// assert_eq!(buf.iter_mut().collect::>()[], b); + /// assert_eq!(&buf.iter_mut().collect::>()[], b); /// ``` #[stable] pub fn iter_mut<'a>(&'a mut self) -> IterMut<'a, T> { @@ -556,7 +556,7 @@ impl RingBuf { let buf = self.buffer_as_slice(); if contiguous { let (empty, buf) = buf.split_at(0); - (buf[self.tail..self.head], empty) + (buf.index(&(self.tail..self.head)), empty) } else { let (mid, right) = buf.split_at(self.tail); let (left, _) = mid.split_at(self.head); @@ -1613,11 +1613,11 @@ impl Extend for RingBuf { #[stable] impl fmt::Show for RingBuf { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - try!(write!(f, "[")); + try!(write!(f, "RingBuf [")); for (i, e) in self.iter().enumerate() { if i != 0 { try!(write!(f, ", ")); } - try!(write!(f, "{}", *e)); + try!(write!(f, "{:?}", *e)); } write!(f, "]") @@ -1648,21 +1648,15 @@ mod tests { assert_eq!(d.len(), 3u); d.push_back(137); assert_eq!(d.len(), 4u); - debug!("{}", d.front()); assert_eq!(*d.front().unwrap(), 42); - debug!("{}", d.back()); assert_eq!(*d.back().unwrap(), 137); let mut i = d.pop_front(); - debug!("{}", i); assert_eq!(i, Some(42)); i = d.pop_back(); - debug!("{}", i); assert_eq!(i, Some(137)); i = d.pop_back(); - debug!("{}", i); assert_eq!(i, Some(137)); i = d.pop_back(); - debug!("{}", i); assert_eq!(i, Some(17)); assert_eq!(d.len(), 0u); d.push_back(3); @@ -2308,12 +2302,12 @@ mod tests { #[test] fn test_show() { let ringbuf: RingBuf = range(0i, 10).collect(); - assert!(format!("{}", ringbuf) == "[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]"); + assert_eq!(format!("{:?}", ringbuf), "RingBuf [0i, 1i, 2i, 3i, 4i, 5i, 6i, 7i, 8i, 9i]"); let ringbuf: RingBuf<&str> = vec!["just", "one", "test", "more"].iter() .map(|&s| s) .collect(); - assert!(format!("{}", ringbuf) == "[just, one, test, more]"); + assert_eq!(format!("{:?}", ringbuf), "RingBuf [\"just\", \"one\", \"test\", \"more\"]"); } #[test] diff --git a/src/libcollections/slice.rs b/src/libcollections/slice.rs index 9e5aa7d645ba0..582887ac38a53 100644 --- a/src/libcollections/slice.rs +++ b/src/libcollections/slice.rs @@ -55,7 +55,7 @@ //! #![feature(slicing_syntax)] //! fn main() { //! let numbers = [0i, 1i, 2i]; -//! let last_numbers = numbers[1..3]; +//! let last_numbers = numbers.index(&(1..3)); //! // last_numbers is now &[1i, 2i] //! } //! 
``` @@ -95,10 +95,10 @@ use core::cmp::Ordering::{self, Greater, Less}; use core::cmp::{self, Ord, PartialEq}; use core::iter::{Iterator, IteratorExt}; use core::iter::{range, range_step, MultiplicativeIterator}; -use core::kinds::Sized; +use core::marker::Sized; use core::mem::size_of; use core::mem; -use core::ops::{FnMut, SliceMut}; +use core::ops::{FnMut, FullRange, Index, IndexMut}; use core::option::Option::{self, Some, None}; use core::ptr::PtrExt; use core::ptr; @@ -1065,12 +1065,12 @@ impl ElementSwaps { #[unstable = "trait is unstable"] impl BorrowFrom> for [T] { - fn borrow_from(owned: &Vec) -> &[T] { owned[] } + fn borrow_from(owned: &Vec) -> &[T] { owned.index(&FullRange) } } #[unstable = "trait is unstable"] impl BorrowFromMut> for [T] { - fn borrow_from_mut(owned: &mut Vec) -> &mut [T] { owned.as_mut_slice_() } + fn borrow_from_mut(owned: &mut Vec) -> &mut [T] { owned.index_mut(&FullRange) } } #[unstable = "trait is unstable"] @@ -1393,15 +1393,20 @@ fn merge_sort(v: &mut [T], mut compare: F) where F: FnMut(&T, &T) -> Order #[cfg(test)] mod tests { - use prelude::{Some, None, range, Vec, ToString, Clone, Greater, Less, Equal}; - use prelude::{SliceExt, Iterator, IteratorExt}; - use prelude::AsSlice; - use prelude::{RandomAccessIterator, Ord, SliceConcatExt}; + use core::cmp::Ordering::{Greater, Less, Equal}; + use core::prelude::{Some, None, range, Clone}; + use core::prelude::{Iterator, IteratorExt}; + use core::prelude::{AsSlice}; + use core::prelude::{Ord, FullRange}; use core::default::Default; use core::mem; + use core::ops::Index; + use std::iter::RandomAccessIterator; use std::rand::{Rng, thread_rng}; use std::rc::Rc; - use super::ElementSwaps; + use string::ToString; + use vec::Vec; + use super::{ElementSwaps, SliceConcatExt, SliceExt}; fn square(n: uint) -> uint { n * n } @@ -1606,7 +1611,7 @@ mod tests { // Test on stack. let vec_stack: &[_] = &[1i, 2, 3]; - let v_b = vec_stack[1u..3u].to_vec(); + let v_b = vec_stack.index(&(1u..3u)).to_vec(); assert_eq!(v_b.len(), 2u); let v_b = v_b.as_slice(); assert_eq!(v_b[0], 2); @@ -1614,7 +1619,7 @@ mod tests { // Test `Box<[T]>` let vec_unique = vec![1i, 2, 3, 4, 5, 6]; - let v_d = vec_unique[1u..6u].to_vec(); + let v_d = vec_unique.index(&(1u..6u)).to_vec(); assert_eq!(v_d.len(), 5u); let v_d = v_d.as_slice(); assert_eq!(v_d[0], 2); @@ -1627,21 +1632,21 @@ mod tests { #[test] fn test_slice_from() { let vec: &[int] = &[1, 2, 3, 4]; - assert_eq!(vec[0..], vec); + assert_eq!(vec.index(&(0..)), vec); let b: &[int] = &[3, 4]; - assert_eq!(vec[2..], b); + assert_eq!(vec.index(&(2..)), b); let b: &[int] = &[]; - assert_eq!(vec[4..], b); + assert_eq!(vec.index(&(4..)), b); } #[test] fn test_slice_to() { let vec: &[int] = &[1, 2, 3, 4]; - assert_eq!(vec[..4], vec); + assert_eq!(vec.index(&(0..4)), vec); let b: &[int] = &[1, 2]; - assert_eq!(vec[..2], b); + assert_eq!(vec.index(&(0..2)), b); let b: &[int] = &[]; - assert_eq!(vec[..0], b); + assert_eq!(vec.index(&(0..0)), b); } @@ -2466,25 +2471,25 @@ mod tests { macro_rules! 
test_show_vec { ($x:expr, $x_str:expr) => ({ let (x, x_str) = ($x, $x_str); - assert_eq!(format!("{}", x), x_str); - assert_eq!(format!("{}", x.as_slice()), x_str); + assert_eq!(format!("{:?}", x), x_str); + assert_eq!(format!("{:?}", x.as_slice()), x_str); }) } let empty: Vec = vec![]; test_show_vec!(empty, "[]"); - test_show_vec!(vec![1i], "[1]"); - test_show_vec!(vec![1i, 2, 3], "[1, 2, 3]"); + test_show_vec!(vec![1i], "[1i]"); + test_show_vec!(vec![1i, 2, 3], "[1i, 2i, 3i]"); test_show_vec!(vec![vec![], vec![1u], vec![1u, 1u]], - "[[], [1], [1, 1]]"); + "[[], [1u], [1u, 1u]]"); let empty_mut: &mut [int] = &mut[]; test_show_vec!(empty_mut, "[]"); let v: &mut[int] = &mut[1]; - test_show_vec!(v, "[1]"); + test_show_vec!(v, "[1i]"); let v: &mut[int] = &mut[1, 2, 3]; - test_show_vec!(v, "[1, 2, 3]"); + test_show_vec!(v, "[1i, 2i, 3i]"); let v: &mut [&mut[uint]] = &mut[&mut[], &mut[1u], &mut[1u, 1u]]; - test_show_vec!(v, "[[], [1], [1, 1]]"); + test_show_vec!(v, "[[], [1u], [1u, 1u]]"); } #[test] @@ -2567,7 +2572,7 @@ mod tests { } assert_eq!(cnt, 3); - for f in v[1..3].iter() { + for f in v.index(&(1..3)).iter() { assert!(*f == Foo); cnt += 1; } diff --git a/src/libcollections/str.rs b/src/libcollections/str.rs index c0482702ccdb6..09d140067f451 100644 --- a/src/libcollections/str.rs +++ b/src/libcollections/str.rs @@ -60,7 +60,7 @@ use core::char::CharExt; use core::clone::Clone; use core::iter::AdditiveIterator; use core::iter::{range, Iterator, IteratorExt}; -use core::ops; +use core::ops::{FullRange, Index}; use core::option::Option::{self, Some, None}; use core::slice::AsSlice; use core::str as core_str; @@ -386,7 +386,7 @@ macro_rules! utf8_acc_cont_byte { #[unstable = "trait is unstable"] impl BorrowFrom for str { - fn borrow_from(owned: &String) -> &str { owned[] } + fn borrow_from(owned: &String) -> &str { owned.index(&FullRange) } } #[unstable = "trait is unstable"] @@ -408,7 +408,7 @@ Section: Trait implementations /// Any string that can be represented as a slice. #[stable] -pub trait StrExt: ops::Slice { +pub trait StrExt: Index { /// Escapes each char in `s` with `char::escape_default`. #[unstable = "return type may change to be an iterator"] fn escape_default(&self) -> String { @@ -464,7 +464,7 @@ pub trait StrExt: ops::Slice { #[unstable = "this functionality may be moved to libunicode"] fn nfd_chars<'a>(&'a self) -> Decompositions<'a> { Decompositions { - iter: self[].chars(), + iter: self.index(&FullRange).chars(), buffer: Vec::new(), sorted: false, kind: Canonical @@ -477,7 +477,7 @@ pub trait StrExt: ops::Slice { #[unstable = "this functionality may be moved to libunicode"] fn nfkd_chars<'a>(&'a self) -> Decompositions<'a> { Decompositions { - iter: self[].chars(), + iter: self.index(&FullRange).chars(), buffer: Vec::new(), sorted: false, kind: Compatible @@ -525,7 +525,7 @@ pub trait StrExt: ops::Slice { /// ``` #[stable] fn contains(&self, pat: &str) -> bool { - core_str::StrExt::contains(self[], pat) + core_str::StrExt::contains(self.index(&FullRange), pat) } /// Returns true if a string contains a char pattern. @@ -541,7 +541,7 @@ pub trait StrExt: ops::Slice { /// ``` #[unstable = "might get removed in favour of a more generic contains()"] fn contains_char(&self, pat: P) -> bool { - core_str::StrExt::contains_char(self[], pat) + core_str::StrExt::contains_char(self.index(&FullRange), pat) } /// An iterator over the characters of `self`. 
Note, this iterates @@ -555,7 +555,7 @@ pub trait StrExt: ops::Slice { /// ``` #[stable] fn chars(&self) -> Chars { - core_str::StrExt::chars(self[]) + core_str::StrExt::chars(self.index(&FullRange)) } /// An iterator over the bytes of `self` @@ -568,13 +568,13 @@ pub trait StrExt: ops::Slice { /// ``` #[stable] fn bytes(&self) -> Bytes { - core_str::StrExt::bytes(self[]) + core_str::StrExt::bytes(self.index(&FullRange)) } /// An iterator over the characters of `self` and their byte offsets. #[stable] fn char_indices(&self) -> CharIndices { - core_str::StrExt::char_indices(self[]) + core_str::StrExt::char_indices(self.index(&FullRange)) } /// An iterator over substrings of `self`, separated by characters @@ -597,7 +597,7 @@ pub trait StrExt: ops::Slice { /// ``` #[stable] fn split(&self, pat: P) -> Split

{ - core_str::StrExt::split(self[], pat) + core_str::StrExt::split(self.index(&FullRange), pat) } /// An iterator over substrings of `self`, separated by characters @@ -624,7 +624,7 @@ pub trait StrExt: ops::Slice { /// ``` #[stable] fn splitn(&self, count: uint, pat: P) -> SplitN

{ - core_str::StrExt::splitn(self[], count, pat) + core_str::StrExt::splitn(self.index(&FullRange), count, pat) } /// An iterator over substrings of `self`, separated by characters @@ -653,7 +653,7 @@ pub trait StrExt: ops::Slice { /// ``` #[unstable = "might get removed"] fn split_terminator(&self, pat: P) -> SplitTerminator

{ - core_str::StrExt::split_terminator(self[], pat) + core_str::StrExt::split_terminator(self.index(&FullRange), pat) } /// An iterator over substrings of `self`, separated by characters @@ -674,7 +674,7 @@ pub trait StrExt: ops::Slice { /// ``` #[stable] fn rsplitn(&self, count: uint, pat: P) -> RSplitN

{ - core_str::StrExt::rsplitn(self[], count, pat) + core_str::StrExt::rsplitn(self.index(&FullRange), count, pat) } /// An iterator over the start and end indices of the disjoint @@ -699,7 +699,7 @@ pub trait StrExt: ops::Slice { /// ``` #[unstable = "might have its iterator type changed"] fn match_indices<'a>(&'a self, pat: &'a str) -> MatchIndices<'a> { - core_str::StrExt::match_indices(self[], pat) + core_str::StrExt::match_indices(self.index(&FullRange), pat) } /// An iterator over the substrings of `self` separated by the pattern `sep`. @@ -715,7 +715,7 @@ pub trait StrExt: ops::Slice { /// ``` #[unstable = "might get removed in the future in favor of a more generic split()"] fn split_str<'a>(&'a self, pat: &'a str) -> SplitStr<'a> { - core_str::StrExt::split_str(self[], pat) + core_str::StrExt::split_str(self.index(&FullRange), pat) } /// An iterator over the lines of a string (subsequences separated @@ -731,7 +731,7 @@ pub trait StrExt: ops::Slice { /// ``` #[stable] fn lines(&self) -> Lines { - core_str::StrExt::lines(self[]) + core_str::StrExt::lines(self.index(&FullRange)) } /// An iterator over the lines of a string, separated by either @@ -747,7 +747,7 @@ pub trait StrExt: ops::Slice { /// ``` #[stable] fn lines_any(&self) -> LinesAny { - core_str::StrExt::lines_any(self[]) + core_str::StrExt::lines_any(self.index(&FullRange)) } /// Returns a slice of the given string from the byte range @@ -782,7 +782,7 @@ pub trait StrExt: ops::Slice { /// ``` #[unstable = "use slice notation [a..b] instead"] fn slice(&self, begin: uint, end: uint) -> &str { - core_str::StrExt::slice(self[], begin, end) + core_str::StrExt::slice(self.index(&FullRange), begin, end) } /// Returns a slice of the string from `begin` to its end. @@ -795,7 +795,7 @@ pub trait StrExt: ops::Slice { /// See also `slice`, `slice_to` and `slice_chars`. #[unstable = "use slice notation [a..] instead"] fn slice_from(&self, begin: uint) -> &str { - core_str::StrExt::slice_from(self[], begin) + core_str::StrExt::slice_from(self.index(&FullRange), begin) } /// Returns a slice of the string from the beginning to byte @@ -809,7 +809,7 @@ pub trait StrExt: ops::Slice { /// See also `slice`, `slice_from` and `slice_chars`. #[unstable = "use slice notation [0..a] instead"] fn slice_to(&self, end: uint) -> &str { - core_str::StrExt::slice_to(self[], end) + core_str::StrExt::slice_to(self.index(&FullRange), end) } /// Returns a slice of the string from the character range @@ -837,7 +837,7 @@ pub trait StrExt: ops::Slice { /// ``` #[unstable = "may have yet to prove its worth"] fn slice_chars(&self, begin: uint, end: uint) -> &str { - core_str::StrExt::slice_chars(self[], begin, end) + core_str::StrExt::slice_chars(self.index(&FullRange), begin, end) } /// Takes a bytewise (not UTF-8) slice from a string. @@ -848,7 +848,7 @@ pub trait StrExt: ops::Slice { /// the entire slice as well. #[stable] unsafe fn slice_unchecked(&self, begin: uint, end: uint) -> &str { - core_str::StrExt::slice_unchecked(self[], begin, end) + core_str::StrExt::slice_unchecked(self.index(&FullRange), begin, end) } /// Returns true if the pattern `pat` is a prefix of the string. @@ -860,7 +860,7 @@ pub trait StrExt: ops::Slice { /// ``` #[stable] fn starts_with(&self, pat: &str) -> bool { - core_str::StrExt::starts_with(self[], pat) + core_str::StrExt::starts_with(self.index(&FullRange), pat) } /// Returns true if the pattern `pat` is a suffix of the string. 
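// Caller-side view of the StrExt pattern methods touched above. The hunks only
// change how the methods reach the underlying `str` slice (the explicit
// `self.index(&FullRange)` spelling of `self[]`) before delegating to
// `core_str::StrExt`; observable behaviour is unchanged. A minimal sketch using
// method names that exist in current std; the exact pattern bounds in this
// 2015-era API differed.
fn main() {
    let s = "xxbananasxx";
    assert!(s.contains("nana"));
    assert!(s.starts_with('x'));
    assert!(s.ends_with("sxx"));
    // trim_matches strips the pattern from both ends.
    assert_eq!(s.trim_matches('x'), "bananas");
    // split yields the substrings between occurrences of the pattern.
    let parts: Vec<&str> = "a,b,c".split(',').collect();
    assert_eq!(parts, ["a", "b", "c"]);
}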
@@ -872,7 +872,7 @@ pub trait StrExt: ops::Slice { /// ``` #[stable] fn ends_with(&self, pat: &str) -> bool { - core_str::StrExt::ends_with(self[], pat) + core_str::StrExt::ends_with(self.index(&FullRange), pat) } /// Returns a string with all pre- and suffixes that match @@ -892,7 +892,7 @@ pub trait StrExt: ops::Slice { /// ``` #[stable] fn trim_matches(&self, pat: P) -> &str { - core_str::StrExt::trim_matches(self[], pat) + core_str::StrExt::trim_matches(self.index(&FullRange), pat) } /// Returns a string with all prefixes that match @@ -912,7 +912,7 @@ pub trait StrExt: ops::Slice { /// ``` #[stable] fn trim_left_matches(&self, pat: P) -> &str { - core_str::StrExt::trim_left_matches(self[], pat) + core_str::StrExt::trim_left_matches(self.index(&FullRange), pat) } /// Returns a string with all suffixes that match @@ -932,7 +932,7 @@ pub trait StrExt: ops::Slice { /// ``` #[stable] fn trim_right_matches(&self, pat: P) -> &str { - core_str::StrExt::trim_right_matches(self[], pat) + core_str::StrExt::trim_right_matches(self.index(&FullRange), pat) } /// Check that `index`-th byte lies at the start and/or end of a @@ -960,7 +960,7 @@ pub trait StrExt: ops::Slice { /// ``` #[unstable = "naming is uncertain with container conventions"] fn is_char_boundary(&self, index: uint) -> bool { - core_str::StrExt::is_char_boundary(self[], index) + core_str::StrExt::is_char_boundary(self.index(&FullRange), index) } /// Pluck a character out of a string and return the index of the next @@ -1018,7 +1018,7 @@ pub trait StrExt: ops::Slice { /// If `i` is not the index of the beginning of a valid UTF-8 character. #[unstable = "naming is uncertain with container conventions"] fn char_range_at(&self, start: uint) -> CharRange { - core_str::StrExt::char_range_at(self[], start) + core_str::StrExt::char_range_at(self.index(&FullRange), start) } /// Given a byte position and a str, return the previous char and its position. @@ -1033,7 +1033,7 @@ pub trait StrExt: ops::Slice { /// If `i` is not an index following a valid UTF-8 character. #[unstable = "naming is uncertain with container conventions"] fn char_range_at_reverse(&self, start: uint) -> CharRange { - core_str::StrExt::char_range_at_reverse(self[], start) + core_str::StrExt::char_range_at_reverse(self.index(&FullRange), start) } /// Plucks the character starting at the `i`th byte of a string. @@ -1053,7 +1053,7 @@ pub trait StrExt: ops::Slice { /// If `i` is not the index of the beginning of a valid UTF-8 character. #[unstable = "naming is uncertain with container conventions"] fn char_at(&self, i: uint) -> char { - core_str::StrExt::char_at(self[], i) + core_str::StrExt::char_at(self.index(&FullRange), i) } /// Plucks the character ending at the `i`th byte of a string. @@ -1064,7 +1064,7 @@ pub trait StrExt: ops::Slice { /// If `i` is not an index following a valid UTF-8 character. #[unstable = "naming is uncertain with container conventions"] fn char_at_reverse(&self, i: uint) -> char { - core_str::StrExt::char_at_reverse(self[], i) + core_str::StrExt::char_at_reverse(self.index(&FullRange), i) } /// Work with the byte buffer of a string as a byte slice. 
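// The byte-indexed character accessors above (`char_at`, `char_range_at`, ...)
// were later dropped from std; only `is_char_boundary` survives. A hedged
// modern sketch of the same operation as a free function (the name
// `char_range_at` is reused here purely for illustration): pluck the char that
// starts at byte `i` and report where the next char begins.
fn char_range_at(s: &str, i: usize) -> (char, usize) {
    assert!(s.is_char_boundary(i), "byte {} is not a char boundary", i);
    let ch = s[i..].chars().next().expect("index past the end of the string");
    (ch, i + ch.len_utf8())
}

fn main() {
    let s = "héllo";
    let (ch, next) = char_range_at(s, 1); // 'é' occupies bytes 1..3
    assert_eq!(ch, 'é');
    assert_eq!(next, 3);
}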
@@ -1076,7 +1076,7 @@ pub trait StrExt: ops::Slice { /// ``` #[stable] fn as_bytes(&self) -> &[u8] { - core_str::StrExt::as_bytes(self[]) + core_str::StrExt::as_bytes(self.index(&FullRange)) } /// Returns the byte index of the first character of `self` that @@ -1104,7 +1104,7 @@ pub trait StrExt: ops::Slice { /// ``` #[stable] fn find(&self, pat: P) -> Option { - core_str::StrExt::find(self[], pat) + core_str::StrExt::find(self.index(&FullRange), pat) } /// Returns the byte index of the last character of `self` that @@ -1132,7 +1132,7 @@ pub trait StrExt: ops::Slice { /// ``` #[stable] fn rfind(&self, pat: P) -> Option { - core_str::StrExt::rfind(self[], pat) + core_str::StrExt::rfind(self.index(&FullRange), pat) } /// Returns the byte index of the first matching substring @@ -1156,7 +1156,7 @@ pub trait StrExt: ops::Slice { /// ``` #[unstable = "might get removed in favor of a more generic find in the future"] fn find_str(&self, needle: &str) -> Option { - core_str::StrExt::find_str(self[], needle) + core_str::StrExt::find_str(self.index(&FullRange), needle) } /// Retrieves the first character from a string slice and returns @@ -1179,7 +1179,7 @@ pub trait StrExt: ops::Slice { /// ``` #[unstable = "awaiting conventions about shifting and slices"] fn slice_shift_char(&self) -> Option<(char, &str)> { - core_str::StrExt::slice_shift_char(self[]) + core_str::StrExt::slice_shift_char(self.index(&FullRange)) } /// Returns the byte offset of an inner slice relative to an enclosing outer slice. @@ -1198,7 +1198,7 @@ pub trait StrExt: ops::Slice { /// ``` #[unstable = "awaiting convention about comparability of arbitrary slices"] fn subslice_offset(&self, inner: &str) -> uint { - core_str::StrExt::subslice_offset(self[], inner) + core_str::StrExt::subslice_offset(self.index(&FullRange), inner) } /// Return an unsafe pointer to the strings buffer. @@ -1209,13 +1209,13 @@ pub trait StrExt: ops::Slice { #[stable] #[inline] fn as_ptr(&self) -> *const u8 { - core_str::StrExt::as_ptr(self[]) + core_str::StrExt::as_ptr(self.index(&FullRange)) } /// Return an iterator of `u16` over the string encoded as UTF-16. #[unstable = "this functionality may only be provided by libunicode"] fn utf16_units(&self) -> Utf16Units { - Utf16Units { encoder: Utf16Encoder::new(self[].chars()) } + Utf16Units { encoder: Utf16Encoder::new(self.index(&FullRange).chars()) } } /// Return the number of bytes in this string @@ -1229,7 +1229,7 @@ pub trait StrExt: ops::Slice { #[stable] #[inline] fn len(&self) -> uint { - core_str::StrExt::len(self[]) + core_str::StrExt::len(self.index(&FullRange)) } /// Returns true if this slice contains no bytes @@ -1242,7 +1242,7 @@ pub trait StrExt: ops::Slice { #[inline] #[stable] fn is_empty(&self) -> bool { - core_str::StrExt::is_empty(self[]) + core_str::StrExt::is_empty(self.index(&FullRange)) } /// Parse this string into the specified type. @@ -1256,7 +1256,7 @@ pub trait StrExt: ops::Slice { #[inline] #[unstable = "this method was just created"] fn parse(&self) -> Option { - core_str::StrExt::parse(self[]) + core_str::StrExt::parse(self.index(&FullRange)) } /// Returns an iterator over the @@ -1280,7 +1280,7 @@ pub trait StrExt: ops::Slice { /// ``` #[unstable = "this functionality may only be provided by libunicode"] fn graphemes(&self, is_extended: bool) -> Graphemes { - UnicodeStr::graphemes(self[], is_extended) + UnicodeStr::graphemes(self.index(&FullRange), is_extended) } /// Returns an iterator over the grapheme clusters of self and their byte offsets. 
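// `words()`, `graphemes()` and `width()` above delegate to `UnicodeStr` in
// libunicode. In today's std, `words()` corresponds to `split_whitespace`,
// while grapheme segmentation and display width live in external crates
// (e.g. unicode-segmentation, unicode-width). A std-only sketch:
fn main() {
    let s = " Mary   had\ta little  \n lamb";
    let words: Vec<&str> = s.split_whitespace().collect();
    assert_eq!(words, ["Mary", "had", "a", "little", "lamb"]);

    // Bytes, chars and user-perceived characters are three different counts.
    let cafe = "cafe\u{301}"; // "café" written with a combining accent
    assert_eq!(cafe.len(), 6);           // UTF-8 bytes
    assert_eq!(cafe.chars().count(), 5); // Unicode scalar values
    // the grapheme count would be 4, but that needs unicode-segmentation
}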
@@ -1295,7 +1295,7 @@ pub trait StrExt: ops::Slice { /// ``` #[unstable = "this functionality may only be provided by libunicode"] fn grapheme_indices(&self, is_extended: bool) -> GraphemeIndices { - UnicodeStr::grapheme_indices(self[], is_extended) + UnicodeStr::grapheme_indices(self.index(&FullRange), is_extended) } /// An iterator over the words of a string (subsequences separated @@ -1311,7 +1311,7 @@ pub trait StrExt: ops::Slice { /// ``` #[stable] fn words(&self) -> Words { - UnicodeStr::words(self[]) + UnicodeStr::words(self.index(&FullRange)) } /// Returns a string's displayed width in columns, treating control @@ -1325,25 +1325,25 @@ pub trait StrExt: ops::Slice { /// `is_cjk` = `false`) if the locale is unknown. #[unstable = "this functionality may only be provided by libunicode"] fn width(&self, is_cjk: bool) -> uint { - UnicodeStr::width(self[], is_cjk) + UnicodeStr::width(self.index(&FullRange), is_cjk) } /// Returns a string with leading and trailing whitespace removed. #[stable] fn trim(&self) -> &str { - UnicodeStr::trim(self[]) + UnicodeStr::trim(self.index(&FullRange)) } /// Returns a string with leading whitespace removed. #[stable] fn trim_left(&self) -> &str { - UnicodeStr::trim_left(self[]) + UnicodeStr::trim_left(self.index(&FullRange)) } /// Returns a string with trailing whitespace removed. #[stable] fn trim_right(&self) -> &str { - UnicodeStr::trim_right(self[]) + UnicodeStr::trim_right(self.index(&FullRange)) } } @@ -2133,7 +2133,7 @@ mod tests { let mut bytes = [0u8; 4]; for c in range(0u32, 0x110000).filter_map(|c| ::core::char::from_u32(c)) { let len = c.encode_utf8(&mut bytes).unwrap_or(0); - let s = ::core::str::from_utf8(bytes[..len]).unwrap(); + let s = ::core::str::from_utf8(&bytes[..len]).unwrap(); if Some(c) != s.chars().next() { panic!("character {:x}={} does not decode correctly", c as u32, c); } @@ -2145,7 +2145,7 @@ mod tests { let mut bytes = [0u8; 4]; for c in range(0u32, 0x110000).filter_map(|c| ::core::char::from_u32(c)) { let len = c.encode_utf8(&mut bytes).unwrap_or(0); - let s = ::core::str::from_utf8(bytes[..len]).unwrap(); + let s = ::core::str::from_utf8(&bytes[..len]).unwrap(); if Some(c) != s.chars().rev().next() { panic!("character {:x}={} does not decode correctly", c as u32, c); } diff --git a/src/libcollections/string.rs b/src/libcollections/string.rs index 0bf311e4d3f6e..59418f50e3c7d 100644 --- a/src/libcollections/string.rs +++ b/src/libcollections/string.rs @@ -22,7 +22,7 @@ use core::fmt; use core::hash; use core::iter::FromIterator; use core::mem; -use core::ops::{self, Deref, Add}; +use core::ops::{self, Deref, Add, Index}; use core::ptr; use core::raw::Slice as RawSlice; use unicode::str as unicode_str; @@ -168,7 +168,7 @@ impl String { if i > 0 { unsafe { - res.as_mut_vec().push_all(v[..i]) + res.as_mut_vec().push_all(v.index(&(0..i))) }; } @@ -185,7 +185,7 @@ impl String { macro_rules! 
error { () => ({ unsafe { if subseqidx != i_ { - res.as_mut_vec().push_all(v[subseqidx..i_]); + res.as_mut_vec().push_all(v.index(&(subseqidx..i_))); } subseqidx = i; res.as_mut_vec().push_all(REPLACEMENT); @@ -254,7 +254,7 @@ impl String { } if subseqidx < total { unsafe { - res.as_mut_vec().push_all(v[subseqidx..total]) + res.as_mut_vec().push_all(v.index(&(subseqidx..total))) }; } Cow::Owned(res) @@ -677,13 +677,25 @@ impl FromUtf8Error { impl fmt::Show for FromUtf8Error { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - self.error.fmt(f) + fmt::String::fmt(self, f) + } +} + +impl fmt::String for FromUtf8Error { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::String::fmt(&self.error, f) } } impl fmt::Show for FromUtf16Error { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - "invalid utf-16: lone surrogate found".fmt(f) + fmt::String::fmt(self, f) + } +} + +impl fmt::String for FromUtf16Error { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::String::fmt("invalid utf-16: lone surrogate found", f) } } @@ -793,10 +805,17 @@ impl Default for String { } } -#[experimental = "waiting on Show stabilization"] +#[experimental = "waiting on fmt stabilization"] +impl fmt::String for String { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::String::fmt(&**self, f) + } +} + +#[experimental = "waiting on fmt stabilization"] impl fmt::Show for String { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - (**self).fmt(f) + fmt::Show::fmt(&**self, f) } } @@ -818,25 +837,32 @@ impl<'a> Add<&'a str> for String { } } -impl ops::Slice for String { +impl ops::Index> for String { + type Output = str; #[inline] - fn as_slice_<'a>(&'a self) -> &'a str { - unsafe { mem::transmute(self.vec.as_slice()) } + fn index(&self, index: &ops::Range) -> &str { + &self.index(&FullRange)[*index] } - +} +impl ops::Index> for String { + type Output = str; #[inline] - fn slice_from_or_fail<'a>(&'a self, from: &uint) -> &'a str { - self[][*from..] 
+ fn index(&self, index: &ops::RangeTo) -> &str { + &self.index(&FullRange)[*index] } - +} +impl ops::Index> for String { + type Output = str; #[inline] - fn slice_to_or_fail<'a>(&'a self, to: &uint) -> &'a str { - self[][..*to] + fn index(&self, index: &ops::RangeFrom) -> &str { + &self.index(&FullRange)[*index] } - +} +impl ops::Index for String { + type Output = str; #[inline] - fn slice_or_fail<'a>(&'a self, from: &uint, to: &uint) -> &'a str { - self[][*from..*to] + fn index(&self, _index: &ops::FullRange) -> &str { + unsafe { mem::transmute(self.vec.as_slice()) } } } @@ -845,7 +871,7 @@ impl ops::Deref for String { type Target = str; fn deref<'a>(&'a self) -> &'a str { - unsafe { mem::transmute(self.vec[]) } + unsafe { mem::transmute(self.vec.index(&FullRange)) } } } @@ -895,6 +921,7 @@ pub trait ToString { fn to_string(&self) -> String; } +#[cfg(stage0)] impl ToString for T { fn to_string(&self) -> String { use core::fmt::Writer; @@ -905,6 +932,17 @@ impl ToString for T { } } +#[cfg(not(stage0))] +impl ToString for T { + fn to_string(&self) -> String { + use core::fmt::Writer; + let mut buf = String::new(); + let _ = buf.write_fmt(format_args!("{}", self)); + buf.shrink_to_fit(); + buf + } +} + impl IntoCow<'static, String, str> for String { fn into_cow(self) -> CowString<'static> { Cow::Owned(self) @@ -943,6 +981,7 @@ mod tests { use str::Utf8Error; use core::iter::repeat; use super::{as_string, CowString}; + use core::ops::FullRange; #[test] fn test_as_string() { @@ -1224,10 +1263,10 @@ mod tests { #[test] fn test_slicing() { let s = "foobar".to_string(); - assert_eq!("foobar", s[]); - assert_eq!("foo", s[..3]); - assert_eq!("bar", s[3..]); - assert_eq!("oob", s[1..4]); + assert_eq!("foobar", &s[]); + assert_eq!("foo", &s[..3]); + assert_eq!("bar", &s[3..]); + assert_eq!("oob", &s[1..4]); } #[test] diff --git a/src/libcollections/vec.rs b/src/libcollections/vec.rs index 99231e7253c3c..5fc3fafac9e22 100644 --- a/src/libcollections/vec.rs +++ b/src/libcollections/vec.rs @@ -55,7 +55,7 @@ use core::default::Default; use core::fmt; use core::hash::{self, Hash}; use core::iter::{repeat, FromIterator}; -use core::kinds::marker::{ContravariantLifetime, InvariantType}; +use core::marker::{ContravariantLifetime, InvariantType}; use core::mem; use core::nonzero::NonZero; use core::num::{Int, UnsignedInt}; @@ -1178,7 +1178,7 @@ impl Clone for Vec { // self.len <= other.len due to the truncate above, so the // slice here is always in-bounds. 
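// The blanket `ToString` impl above formats `self` with the `{}` trait
// (`fmt::String` here, `fmt::Display` today) into a fresh buffer through
// `fmt::Writer` and `format_args!`. A hedged sketch of the same mechanism in
// current Rust: implement Display and `to_string()` comes for free, and the
// hand-rolled version is just `write!` into a `String`. `Celsius` and
// `to_string_by_hand` are illustrative names, not part of the patch.
use std::fmt::{self, Write};

struct Celsius(f64);

impl fmt::Display for Celsius {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}°C", self.0)
    }
}

fn to_string_by_hand<T: fmt::Display>(value: &T) -> String {
    let mut buf = String::new();
    // `String: fmt::Write`, so this mirrors `buf.write_fmt(format_args!(...))`.
    write!(buf, "{}", value).expect("writing to a String cannot fail");
    buf
}

fn main() {
    let t = Celsius(21.5);
    assert_eq!(t.to_string(), "21.5°C");
    assert_eq!(to_string_by_hand(&t), t.to_string());
}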
- let slice = other[self.len()..]; + let slice = other.index(&(self.len()..)); self.push_all(slice); } } @@ -1209,48 +1209,66 @@ impl IndexMut for Vec { } } -impl ops::Slice for Vec { + +impl ops::Index> for Vec { + type Output = [T]; #[inline] - fn as_slice_<'a>(&'a self) -> &'a [T] { - self.as_slice() + fn index(&self, index: &ops::Range) -> &[T] { + self.as_slice().index(index) } - +} +impl ops::Index> for Vec { + type Output = [T]; #[inline] - fn slice_from_or_fail<'a>(&'a self, start: &uint) -> &'a [T] { - self.as_slice().slice_from_or_fail(start) + fn index(&self, index: &ops::RangeTo) -> &[T] { + self.as_slice().index(index) } - +} +impl ops::Index> for Vec { + type Output = [T]; #[inline] - fn slice_to_or_fail<'a>(&'a self, end: &uint) -> &'a [T] { - self.as_slice().slice_to_or_fail(end) + fn index(&self, index: &ops::RangeFrom) -> &[T] { + self.as_slice().index(index) } +} +impl ops::Index for Vec { + type Output = [T]; #[inline] - fn slice_or_fail<'a>(&'a self, start: &uint, end: &uint) -> &'a [T] { - self.as_slice().slice_or_fail(start, end) + fn index(&self, _index: &ops::FullRange) -> &[T] { + self.as_slice() } } -impl ops::SliceMut for Vec { +impl ops::IndexMut> for Vec { + type Output = [T]; #[inline] - fn as_mut_slice_<'a>(&'a mut self) -> &'a mut [T] { - self.as_mut_slice() + fn index_mut(&mut self, index: &ops::Range) -> &mut [T] { + self.as_mut_slice().index_mut(index) } - +} +impl ops::IndexMut> for Vec { + type Output = [T]; #[inline] - fn slice_from_or_fail_mut<'a>(&'a mut self, start: &uint) -> &'a mut [T] { - self.as_mut_slice().slice_from_or_fail_mut(start) + fn index_mut(&mut self, index: &ops::RangeTo) -> &mut [T] { + self.as_mut_slice().index_mut(index) } - +} +impl ops::IndexMut> for Vec { + type Output = [T]; #[inline] - fn slice_to_or_fail_mut<'a>(&'a mut self, end: &uint) -> &'a mut [T] { - self.as_mut_slice().slice_to_or_fail_mut(end) + fn index_mut(&mut self, index: &ops::RangeFrom) -> &mut [T] { + self.as_mut_slice().index_mut(index) } +} +impl ops::IndexMut for Vec { + type Output = [T]; #[inline] - fn slice_or_fail_mut<'a>(&'a mut self, start: &uint, end: &uint) -> &'a mut [T] { - self.as_mut_slice().slice_or_fail_mut(start, end) + fn index_mut(&mut self, _index: &ops::FullRange) -> &mut [T] { + self.as_mut_slice() } } + #[stable] impl ops::Deref for Vec { type Target = [T]; @@ -1430,9 +1448,25 @@ impl Default for Vec { } #[experimental = "waiting on Show stability"] -impl fmt::Show for Vec { +impl fmt::Show for Vec { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::Show::fmt(self.as_slice(), f) + } +} + +#[cfg(stage0)] +#[experimental = "waiting on Show stability"] +impl fmt::String for Vec { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::String::fmt(self.as_slice(), f) + } +} + +#[cfg(not(stage0))] +#[experimental = "waiting on Show stability"] +impl fmt::String for Vec { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - self.as_slice().fmt(f) + fmt::String::fmt(self.as_slice(), f) } } @@ -1781,6 +1815,7 @@ mod tests { use prelude::*; use core::mem::size_of; use core::iter::repeat; + use core::ops::FullRange; use test::Bencher; use super::as_vec; @@ -1918,7 +1953,7 @@ mod tests { let (left, right) = values.split_at_mut(2); { let left: &[_] = left; - assert!(left[0..left.len()] == [1, 2][]); + assert!(&left[..left.len()] == &[1, 2][]); } for p in left.iter_mut() { *p += 1; @@ -1926,7 +1961,7 @@ mod tests { { let right: &[_] = right; - assert!(right[0..right.len()] == [3, 4, 5][]); + assert!(&right[..right.len()] 
== &[3, 4, 5][]); } for p in right.iter_mut() { *p += 2; @@ -2097,35 +2132,35 @@ mod tests { #[should_fail] fn test_slice_out_of_bounds_1() { let x: Vec = vec![1, 2, 3, 4, 5]; - x[-1..]; + &x[(-1)..]; } #[test] #[should_fail] fn test_slice_out_of_bounds_2() { let x: Vec = vec![1, 2, 3, 4, 5]; - x[..6]; + &x[..6]; } #[test] #[should_fail] fn test_slice_out_of_bounds_3() { let x: Vec = vec![1, 2, 3, 4, 5]; - x[-1..4]; + &x[(-1)..4]; } #[test] #[should_fail] fn test_slice_out_of_bounds_4() { let x: Vec = vec![1, 2, 3, 4, 5]; - x[1..6]; + &x[1..6]; } #[test] #[should_fail] fn test_slice_out_of_bounds_5() { let x: Vec = vec![1, 2, 3, 4, 5]; - x[3..2]; + &x[3..2]; } #[test] @@ -2371,7 +2406,7 @@ mod tests { b.bytes = src_len as u64; b.iter(|| { - let dst = src.clone().as_slice().to_vec(); + let dst = src.clone()[].to_vec(); assert_eq!(dst.len(), src_len); assert!(dst.iter().enumerate().all(|(i, x)| i == *x)); }); diff --git a/src/libcollections/vec_map.rs b/src/libcollections/vec_map.rs index cc757b656238e..4399a6fec2274 100644 --- a/src/libcollections/vec_map.rs +++ b/src/libcollections/vec_map.rs @@ -455,7 +455,8 @@ impl VecMap { if *key >= self.v.len() { return None; } - self.v[*key].take() + let result = &mut self.v[*key]; + result.take() } } @@ -488,11 +489,11 @@ impl Ord for VecMap { #[stable] impl fmt::Show for VecMap { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - try!(write!(f, "{{")); + try!(write!(f, "VecMap {{")); for (i, (k, v)) in self.iter().enumerate() { if i != 0 { try!(write!(f, ", ")); } - try!(write!(f, "{}: {}", k, *v)); + try!(write!(f, "{}: {:?}", k, *v)); } write!(f, "}}") @@ -928,9 +929,9 @@ mod test_map { map.insert(1, 2i); map.insert(3, 4i); - let map_str = map.to_string(); - assert!(map_str == "{1: 2, 3: 4}" || map_str == "{3: 4, 1: 2}"); - assert_eq!(format!("{}", empty), "{}"); + let map_str = format!("{:?}", map); + assert!(map_str == "VecMap {1: 2i, 3: 4i}" || map_str == "{3: 4i, 1: 2i}"); + assert_eq!(format!("{:?}", empty), "VecMap {}"); } #[test] diff --git a/src/libcore/any.rs b/src/libcore/any.rs index 33cb335d75645..25007bfde93a9 100644 --- a/src/libcore/any.rs +++ b/src/libcore/any.rs @@ -49,7 +49,7 @@ //! println!("String ({}): {}", as_string.len(), as_string); //! } //! None => { -//! println!("{}", value); +//! println!("{:?}", value); //! } //! } //! } diff --git a/src/libcore/array.rs b/src/libcore/array.rs index ba7714ad9bc8c..05db9e11760e3 100644 --- a/src/libcore/array.rs +++ b/src/libcore/array.rs @@ -17,8 +17,8 @@ use clone::Clone; use cmp::{PartialEq, Eq, PartialOrd, Ord, Ordering}; use fmt; -use kinds::Copy; -use ops::Deref; +use marker::Copy; +use ops::{Deref, FullRange, Index}; use option::Option; // macro for implementing n-ary tuple functions and operations @@ -35,7 +35,7 @@ macro_rules! array_impls { #[unstable = "waiting for Show to stabilize"] impl fmt::Show for [T; $N] { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Show::fmt(&self[], f) + fmt::Show::fmt(&self.index(&FullRange), f) } } @@ -43,11 +43,11 @@ macro_rules! array_impls { impl PartialEq<[B; $N]> for [A; $N] where A: PartialEq { #[inline] fn eq(&self, other: &[B; $N]) -> bool { - self[] == other[] + self.index(&FullRange) == other.index(&FullRange) } #[inline] fn ne(&self, other: &[B; $N]) -> bool { - self[] != other[] + self.index(&FullRange) != other.index(&FullRange) } } @@ -57,9 +57,13 @@ macro_rules! 
array_impls { Rhs: Deref, { #[inline(always)] - fn eq(&self, other: &Rhs) -> bool { PartialEq::eq(self[], &**other) } + fn eq(&self, other: &Rhs) -> bool { + PartialEq::eq(self.index(&FullRange), &**other) + } #[inline(always)] - fn ne(&self, other: &Rhs) -> bool { PartialEq::ne(self[], &**other) } + fn ne(&self, other: &Rhs) -> bool { + PartialEq::ne(self.index(&FullRange), &**other) + } } #[stable] @@ -68,9 +72,13 @@ macro_rules! array_impls { Lhs: Deref { #[inline(always)] - fn eq(&self, other: &[B; $N]) -> bool { PartialEq::eq(&**self, other[]) } + fn eq(&self, other: &[B; $N]) -> bool { + PartialEq::eq(&**self, other.index(&FullRange)) + } #[inline(always)] - fn ne(&self, other: &[B; $N]) -> bool { PartialEq::ne(&**self, other[]) } + fn ne(&self, other: &[B; $N]) -> bool { + PartialEq::ne(&**self, other.index(&FullRange)) + } } #[stable] @@ -80,23 +88,23 @@ macro_rules! array_impls { impl PartialOrd for [T; $N] { #[inline] fn partial_cmp(&self, other: &[T; $N]) -> Option { - PartialOrd::partial_cmp(&self[], &other[]) + PartialOrd::partial_cmp(&self.index(&FullRange), &other.index(&FullRange)) } #[inline] fn lt(&self, other: &[T; $N]) -> bool { - PartialOrd::lt(&self[], &other[]) + PartialOrd::lt(&self.index(&FullRange), &other.index(&FullRange)) } #[inline] fn le(&self, other: &[T; $N]) -> bool { - PartialOrd::le(&self[], &other[]) + PartialOrd::le(&self.index(&FullRange), &other.index(&FullRange)) } #[inline] fn ge(&self, other: &[T; $N]) -> bool { - PartialOrd::ge(&self[], &other[]) + PartialOrd::ge(&self.index(&FullRange), &other.index(&FullRange)) } #[inline] fn gt(&self, other: &[T; $N]) -> bool { - PartialOrd::gt(&self[], &other[]) + PartialOrd::gt(&self.index(&FullRange), &other.index(&FullRange)) } } @@ -104,7 +112,7 @@ macro_rules! array_impls { impl Ord for [T; $N] { #[inline] fn cmp(&self, other: &[T; $N]) -> Ordering { - Ord::cmp(&self[], &other[]) + Ord::cmp(&self.index(&FullRange), &other.index(&FullRange)) } } )+ diff --git a/src/libcore/atomic.rs b/src/libcore/atomic.rs index 15c20253c8bc7..e740a9292528c 100644 --- a/src/libcore/atomic.rs +++ b/src/libcore/atomic.rs @@ -50,7 +50,7 @@ //! let spinlock_clone = spinlock.clone(); //! Thread::spawn(move|| { //! spinlock_clone.store(0, Ordering::SeqCst); -//! }).detach(); +//! }); //! //! // Wait for the other task to release the lock //! while spinlock.load(Ordering::SeqCst) != 0 {} @@ -72,7 +72,7 @@ use self::Ordering::*; -use kinds::Sync; +use marker::Sync; use intrinsics; use cell::UnsafeCell; diff --git a/src/libcore/borrow.rs b/src/libcore/borrow.rs index 2c08b97635580..3163135542296 100644 --- a/src/libcore/borrow.rs +++ b/src/libcore/borrow.rs @@ -47,7 +47,7 @@ use clone::Clone; use cmp::{Eq, Ord, Ordering, PartialEq, PartialOrd}; use fmt; -use kinds::Sized; +use marker::Sized; use ops::Deref; use option::Option; use self::Cow::*; @@ -133,6 +133,7 @@ impl ToOwned for T where T: Clone { /// } /// } /// ``` +//#[deriving(Show)] NOTE(stage0): uncomment after snapshot pub enum Cow<'a, T, B: ?Sized + 'a> where B: ToOwned { /// Borrowed data. 
Borrowed(&'a B), @@ -141,6 +142,16 @@ pub enum Cow<'a, T, B: ?Sized + 'a> where B: ToOwned { Owned(T) } +//NOTE(stage0): replace with deriving(Show) after snapshot +impl<'a, T, B: ?Sized> fmt::Show for Cow<'a, T, B> where + B: fmt::String + ToOwned, + T: fmt::String +{ + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::String::fmt(self, f) + } +} + #[stable] impl<'a, T, B: ?Sized> Clone for Cow<'a, T, B> where B: ToOwned { fn clone(&self) -> Cow<'a, T, B> { @@ -237,11 +248,14 @@ impl<'a, T, B: ?Sized> PartialOrd for Cow<'a, T, B> where B: PartialOrd + ToOwne } } -impl<'a, T, B: ?Sized> fmt::Show for Cow<'a, T, B> where B: fmt::Show + ToOwned, T: fmt::Show { +impl<'a, T, B: ?Sized> fmt::String for Cow<'a, T, B> where + B: fmt::String + ToOwned, + T: fmt::String, +{ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { - Borrowed(ref b) => fmt::Show::fmt(b, f), - Owned(ref o) => fmt::Show::fmt(o, f), + Borrowed(ref b) => fmt::String::fmt(b, f), + Owned(ref o) => fmt::String::fmt(o, f), } } } diff --git a/src/libcore/cell.rs b/src/libcore/cell.rs index fd18d6ac3f3b4..674364269f1da 100644 --- a/src/libcore/cell.rs +++ b/src/libcore/cell.rs @@ -160,8 +160,7 @@ use clone::Clone; use cmp::PartialEq; use default::Default; -use fmt; -use kinds::{Copy, Send}; +use marker::{Copy, Send}; use ops::{Deref, DerefMut, Drop}; use option::Option; use option::Option::{None, Some}; @@ -364,16 +363,6 @@ impl PartialEq for RefCell { } } -#[unstable] -impl fmt::Show for RefCell { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self.try_borrow() { - Some(val) => write!(f, "{}", val), - None => write!(f, "") - } - } -} - struct BorrowRef<'b> { _borrow: &'b Cell, } @@ -520,7 +509,7 @@ impl<'b, T> DerefMut for RefMut<'b, T> { /// /// ```rust /// use std::cell::UnsafeCell; -/// use std::kinds::marker; +/// use std::marker; /// /// struct NotThreadSafe { /// value: UnsafeCell, diff --git a/src/libcore/clone.rs b/src/libcore/clone.rs index 17991659f9789..3149247a83aed 100644 --- a/src/libcore/clone.rs +++ b/src/libcore/clone.rs @@ -21,7 +21,7 @@ #![stable] -use kinds::Sized; +use marker::Sized; /// A common trait for cloning an object. 
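// The recurring pattern in this patch: the old catch-all `Show` trait is split
// into `Show` for `{:?}` (today's `Debug`) and `String` for `{}` (today's
// `Display`), and impls such as the `Cow` one above now delegate explicitly to
// one of the two. A sketch with the current trait names; the era-specific
// names and the `fmt::String::fmt(self, f)` forwarding are as shown in the
// diff, and `MaybeOwned` is only an illustrative stand-in for `Cow`.
use std::fmt;

enum MaybeOwned<'a> {
    Borrowed(&'a str),
    Owned(String),
}

impl fmt::Display for MaybeOwned<'_> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            MaybeOwned::Borrowed(b) => fmt::Display::fmt(b, f),
            MaybeOwned::Owned(o) => fmt::Display::fmt(o, f),
        }
    }
}

impl fmt::Debug for MaybeOwned<'_> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Debug output is quoted/escaped, Display output is not.
        match self {
            MaybeOwned::Borrowed(b) => fmt::Debug::fmt(b, f),
            MaybeOwned::Owned(o) => fmt::Debug::fmt(o, f),
        }
    }
}

fn main() {
    let v = MaybeOwned::Borrowed("hi there");
    assert_eq!(format!("{}", v), "hi there");
    assert_eq!(format!("{:?}", v), "\"hi there\"");
}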
#[stable] diff --git a/src/libcore/cmp.rs b/src/libcore/cmp.rs index af5e98ed30324..c3dfd5f51595f 100644 --- a/src/libcore/cmp.rs +++ b/src/libcore/cmp.rs @@ -43,7 +43,7 @@ use self::Ordering::*; -use kinds::Sized; +use marker::Sized; use option::Option::{self, Some, None}; /// Trait for equality comparisons which are [partial equivalence relations]( @@ -316,7 +316,7 @@ pub fn partial_max(v1: T, v2: T) -> Option { mod impls { use cmp::{PartialOrd, Ord, PartialEq, Eq, Ordering}; use cmp::Ordering::{Less, Greater, Equal}; - use kinds::Sized; + use marker::Sized; use option::Option; use option::Option::{Some, None}; diff --git a/src/libcore/fmt/float.rs b/src/libcore/fmt/float.rs index f63242b4f859a..d833b8fed7779 100644 --- a/src/libcore/fmt/float.rs +++ b/src/libcore/fmt/float.rs @@ -20,7 +20,7 @@ use fmt; use iter::{IteratorExt, range}; use num::{cast, Float, ToPrimitive}; use num::FpCategory as Fp; -use ops::FnOnce; +use ops::{FnOnce, Index}; use result::Result::Ok; use slice::{self, SliceExt}; use str::{self, StrExt}; @@ -332,5 +332,5 @@ pub fn float_to_str_bytes_common( } } - f(unsafe { str::from_utf8_unchecked(buf[..end]) }) + f(unsafe { str::from_utf8_unchecked(buf.index(&(0..end))) }) } diff --git a/src/libcore/fmt/mod.rs b/src/libcore/fmt/mod.rs index 951f5c29f00e8..f9027f19068e4 100644 --- a/src/libcore/fmt/mod.rs +++ b/src/libcore/fmt/mod.rs @@ -13,22 +13,20 @@ #![allow(unused_variables)] use any; -use cell::{Cell, Ref, RefMut}; +use cell::{Cell, RefCell, Ref, RefMut}; +use char::CharExt; use iter::{Iterator, IteratorExt, range}; -use kinds::{Copy, Sized}; +use marker::{Copy, Sized}; use mem; use option::Option; use option::Option::{Some, None}; -use ops::{Deref, FnOnce}; use result::Result::Ok; +use ops::{Deref, FnOnce, Index}; use result; use slice::SliceExt; use slice; use str::{self, StrExt, Utf8Error}; -// NOTE: for old macros; remove after the next snapshot -#[cfg(stage0)] use result::Result::Err; - pub use self::num::radix; pub use self::num::Radix; pub use self::num::RadixFmt; @@ -216,20 +214,33 @@ pub struct Arguments<'a> { } impl<'a> Show for Arguments<'a> { + fn fmt(&self, fmt: &mut Formatter) -> Result { + String::fmt(self, fmt) + } +} + +impl<'a> String for Arguments<'a> { fn fmt(&self, fmt: &mut Formatter) -> Result { write(fmt.buf, *self) } } -/// When a format is not otherwise specified, types are formatted by ascribing -/// to this trait. There is not an explicit way of selecting this trait to be -/// used for formatting, it is only if no other format is specified. +/// Format trait for the `:?` format. Useful for debugging, most all types +/// should implement this. #[unstable = "I/O and core have yet to be reconciled"] pub trait Show { /// Formats the value using the given formatter. fn fmt(&self, &mut Formatter) -> Result; } +/// When a value can be semantically expressed as a String, this trait may be +/// used. It corresponds to the default format, `{}`. +#[unstable = "I/O and core have yet to be reconciled"] +pub trait String { + /// Formats the value using the given formatter. 
+ fn fmt(&self, &mut Formatter) -> Result; +} + /// Format trait for the `o` character #[unstable = "I/O and core have yet to be reconciled"] @@ -413,7 +424,7 @@ impl<'a> Formatter<'a> { for c in sign.into_iter() { let mut b = [0; 4]; let n = c.encode_utf8(&mut b).unwrap_or(0); - let b = unsafe { str::from_utf8_unchecked(b[0..n]) }; + let b = unsafe { str::from_utf8_unchecked(b.index(&(0..n))) }; try!(f.buf.write_str(b)); } if prefixed { f.buf.write_str(prefix) } @@ -521,7 +532,7 @@ impl<'a> Formatter<'a> { let mut fill = [0u8; 4]; let len = self.fill.encode_utf8(&mut fill).unwrap_or(0); - let fill = unsafe { str::from_utf8_unchecked(fill[..len]) }; + let fill = unsafe { str::from_utf8_unchecked(fill.index(&(..len))) }; for _ in range(0, pre_pad) { try!(self.buf.write_str(fill)); @@ -572,7 +583,7 @@ impl<'a> Formatter<'a> { impl Show for Error { fn fmt(&self, f: &mut Formatter) -> Result { - "an error occurred when formatting an argument".fmt(f) + String::fmt("an error occurred when formatting an argument", f) } } @@ -595,33 +606,86 @@ pub fn argumentuint<'a>(s: &'a uint) -> Argument<'a> { // Implementations of the core formatting traits -impl<'a, T: ?Sized + Show> Show for &'a T { - fn fmt(&self, f: &mut Formatter) -> Result { (**self).fmt(f) } -} -impl<'a, T: ?Sized + Show> Show for &'a mut T { - fn fmt(&self, f: &mut Formatter) -> Result { (**self).fmt(f) } +macro_rules! fmt_refs { + ($($tr:ident),*) => { + $( + impl<'a, T: ?Sized + $tr> $tr for &'a T { + fn fmt(&self, f: &mut Formatter) -> Result { $tr::fmt(&**self, f) } + } + impl<'a, T: ?Sized + $tr> $tr for &'a mut T { + fn fmt(&self, f: &mut Formatter) -> Result { $tr::fmt(&**self, f) } + } + )* + } } +fmt_refs! { Show, String, Octal, Binary, LowerHex, UpperHex, LowerExp, UpperExp } + impl Show for bool { fn fmt(&self, f: &mut Formatter) -> Result { - Show::fmt(if *self { "true" } else { "false" }, f) + String::fmt(self, f) + } +} + +impl String for bool { + fn fmt(&self, f: &mut Formatter) -> Result { + String::fmt(if *self { "true" } else { "false" }, f) + } +} + +#[cfg(stage0)] +//NOTE(stage0): remove impl after snapshot +impl Show for str { + fn fmt(&self, f: &mut Formatter) -> Result { + String::fmt(self, f) } } +#[cfg(not(stage0))] +//NOTE(stage0): remove cfg after snapshot impl Show for str { + fn fmt(&self, f: &mut Formatter) -> Result { + try!(write!(f, "\"")); + for c in self.chars().flat_map(|c| c.escape_default()) { + try!(write!(f, "{}", c)); + } + write!(f, "\"") + } +} + +impl String for str { fn fmt(&self, f: &mut Formatter) -> Result { f.pad(self) } } +#[cfg(stage0)] +//NOTE(stage0): remove impl after snapshot +impl Show for char { + fn fmt(&self, f: &mut Formatter) -> Result { + String::fmt(self, f) + } +} + +#[cfg(not(stage0))] +//NOTE(stage0): remove cfg after snapshot impl Show for char { fn fmt(&self, f: &mut Formatter) -> Result { use char::CharExt; + try!(write!(f, "'")); + for c in self.escape_default() { + try!(write!(f, "{}", c)); + } + write!(f, "'") + } +} +impl String for char { + fn fmt(&self, f: &mut Formatter) -> Result { let mut utf8 = [0u8; 4]; let amt = self.encode_utf8(&mut utf8).unwrap_or(0); - let s: &str = unsafe { mem::transmute(utf8[..amt]) }; - Show::fmt(s, f) + let s: &str = unsafe { mem::transmute(utf8.index(&(0..amt))) }; + String::fmt(s, f) } } @@ -653,7 +717,15 @@ impl<'a, T> Pointer for &'a mut T { } macro_rules! 
floating { ($ty:ident) => { + impl Show for $ty { + fn fmt(&self, fmt: &mut Formatter) -> Result { + try!(String::fmt(self, fmt)); + fmt.write_str(stringify!($ty)) + } + } + + impl String for $ty { fn fmt(&self, fmt: &mut Formatter) -> Result { use num::Float; @@ -724,10 +796,15 @@ floating! { f64 } impl Show for *const T { fn fmt(&self, f: &mut Formatter) -> Result { Pointer::fmt(self, f) } } - +impl String for *const T { + fn fmt(&self, f: &mut Formatter) -> Result { Pointer::fmt(self, f) } +} impl Show for *mut T { fn fmt(&self, f: &mut Formatter) -> Result { Pointer::fmt(self, f) } } +impl String for *mut T { + fn fmt(&self, f: &mut Formatter) -> Result { Pointer::fmt(self, f) } +} macro_rules! peel { ($name:ident, $($other:ident,)*) => (tuple! { $($other,)* }) @@ -746,7 +823,7 @@ macro_rules! tuple { if n > 0 { try!(write!(f, ", ")); } - try!(write!(f, "{}", *$name)); + try!(write!(f, "{:?}", *$name)); n += 1; )* if n == 1 { @@ -766,6 +843,49 @@ impl<'a> Show for &'a (any::Any+'a) { } impl Show for [T] { + fn fmt(&self, f: &mut Formatter) -> Result { + if f.flags & (1 << (rt::FlagAlternate as uint)) == 0 { + try!(write!(f, "[")); + } + let mut is_first = true; + for x in self.iter() { + if is_first { + is_first = false; + } else { + try!(write!(f, ", ")); + } + try!(write!(f, "{:?}", *x)) + } + if f.flags & (1 << (rt::FlagAlternate as uint)) == 0 { + try!(write!(f, "]")); + } + Ok(()) + } +} + +#[cfg(stage0)] +impl String for [T] { + fn fmt(&self, f: &mut Formatter) -> Result { + if f.flags & (1 << (rt::FlagAlternate as uint)) == 0 { + try!(write!(f, "[")); + } + let mut is_first = true; + for x in self.iter() { + if is_first { + is_first = false; + } else { + try!(write!(f, ", ")); + } + try!(write!(f, "{}", *x)) + } + if f.flags & (1 << (rt::FlagAlternate as uint)) == 0 { + try!(write!(f, "]")); + } + Ok(()) + } +} +#[cfg(not(stage0))] +impl String for [T] { fn fmt(&self, f: &mut Formatter) -> Result { if f.flags & (1 << (rt::FlagAlternate as uint)) == 0 { try!(write!(f, "[")); @@ -792,25 +912,41 @@ impl Show for () { } } +impl String for () { + fn fmt(&self, f: &mut Formatter) -> Result { + f.pad("()") + } +} + impl Show for Cell { fn fmt(&self, f: &mut Formatter) -> Result { - write!(f, "Cell {{ value: {} }}", self.get()) + write!(f, "Cell {{ value: {:?} }}", self.get()) + } +} + +#[unstable] +impl Show for RefCell { + fn fmt(&self, f: &mut Formatter) -> Result { + match self.try_borrow() { + Some(val) => write!(f, "RefCell {{ value: {:?} }}", val), + None => write!(f, "RefCell {{ }}") + } } } impl<'b, T: Show> Show for Ref<'b, T> { fn fmt(&self, f: &mut Formatter) -> Result { - (**self).fmt(f) + Show::fmt(&**self, f) } } impl<'b, T: Show> Show for RefMut<'b, T> { fn fmt(&self, f: &mut Formatter) -> Result { - (*(self.deref())).fmt(f) + Show::fmt(&*(self.deref()), f) } } -impl Show for Utf8Error { +impl String for Utf8Error { fn fmt(&self, f: &mut Formatter) -> Result { match *self { Utf8Error::InvalidByte(n) => { diff --git a/src/libcore/fmt/num.rs b/src/libcore/fmt/num.rs index e0724fc2da5f5..17149aed3dbab 100644 --- a/src/libcore/fmt/num.rs +++ b/src/libcore/fmt/num.rs @@ -16,6 +16,7 @@ use fmt; use iter::IteratorExt; +use ops::Index; use num::{Int, cast}; use slice::SliceExt; use str; @@ -61,7 +62,7 @@ trait GenericRadix { if x == zero { break }; // No more digits left to accumulate. 
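// The `GenericRadix` loop interrupted just above fills a byte buffer from the
// back (`curr` walks toward 0, one digit per division by the base) and then
// hands `buf[curr..]` to `pad_integral`. A standalone sketch of the same idea,
// not the libcore implementation itself; `to_radix` is an illustrative name.
fn to_radix(mut x: u64, base: u64) -> String {
    assert!((2..=36).contains(&base), "radix must be in 2..=36");
    const DIGITS: &[u8] = b"0123456789abcdefghijklmnopqrstuvwxyz";
    let mut buf = [0u8; 64];          // enough for u64 in base 2
    let mut curr = buf.len();
    loop {
        curr -= 1;
        buf[curr] = DIGITS[(x % base) as usize];
        x /= base;
        if x == 0 { break; }          // no more digits left to accumulate
    }
    // Only the tail of the buffer holds digits, exactly like `buf[curr..]` above.
    String::from_utf8_lossy(&buf[curr..]).into_owned()
}

fn main() {
    assert_eq!(to_radix(255, 16), "ff");
    assert_eq!(to_radix(255, 2), "11111111");
    assert_eq!(to_radix(0, 10), "0");
}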
} } - let buf = unsafe { str::from_utf8_unchecked(buf[curr..]) }; + let buf = unsafe { str::from_utf8_unchecked(buf.index(&(curr..))) }; f.pad_integral(is_positive, self.prefix(), buf) } } @@ -153,8 +154,22 @@ pub fn radix(x: T, base: u8) -> RadixFmt { } macro_rules! radix_fmt { - ($T:ty as $U:ty, $fmt:ident) => { + ($T:ty as $U:ty, $fmt:ident, $S:expr) => { + #[cfg(stage0)] impl fmt::Show for RadixFmt<$T, Radix> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::String::fmt(self, f) + } + } + + #[cfg(not(stage0))] + impl fmt::Show for RadixFmt<$T, Radix> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + try!(fmt::String::fmt(self, f)); + f.write_str($S) + } + } + impl fmt::String for RadixFmt<$T, Radix> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { RadixFmt(ref x, radix) => radix.$fmt(*x as $U, f) } } @@ -170,24 +185,48 @@ macro_rules! int_base { } } } + +macro_rules! show { + ($T:ident with $S:expr) => { + #[cfg(stage0)] + impl fmt::Show for $T { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::String::fmt(self, f) + } + } + + #[cfg(not(stage0))] + impl fmt::Show for $T { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + try!(fmt::String::fmt(self, f)); + f.write_str($S) + } + } + } +} macro_rules! integer { ($Int:ident, $Uint:ident) => { - int_base! { Show for $Int as $Int -> Decimal } + integer! { $Int, $Uint, stringify!($Int), stringify!($Uint) } + }; + ($Int:ident, $Uint:ident, $SI:expr, $SU:expr) => { + int_base! { String for $Int as $Int -> Decimal } int_base! { Binary for $Int as $Uint -> Binary } int_base! { Octal for $Int as $Uint -> Octal } int_base! { LowerHex for $Int as $Uint -> LowerHex } int_base! { UpperHex for $Int as $Uint -> UpperHex } - radix_fmt! { $Int as $Int, fmt_int } + radix_fmt! { $Int as $Int, fmt_int, $SI } + show! { $Int with $SI } - int_base! { Show for $Uint as $Uint -> Decimal } + int_base! { String for $Uint as $Uint -> Decimal } int_base! { Binary for $Uint as $Uint -> Binary } int_base! { Octal for $Uint as $Uint -> Octal } int_base! { LowerHex for $Uint as $Uint -> LowerHex } int_base! { UpperHex for $Uint as $Uint -> UpperHex } - radix_fmt! { $Uint as $Uint, fmt_int } + radix_fmt! { $Uint as $Uint, fmt_int, $SU } + show! { $Uint with $SU } } } -integer! { int, uint } +integer! { int, uint, "i", "u" } integer! { i8, u8 } integer! { i16, u16 } integer! { i32, u32 } diff --git a/src/libcore/intrinsics.rs b/src/libcore/intrinsics.rs index 7e1359d5c1201..822416a387e63 100644 --- a/src/libcore/intrinsics.rs +++ b/src/libcore/intrinsics.rs @@ -202,7 +202,6 @@ extern "rust-intrinsic" { /// crate it is invoked in. pub fn type_id() -> TypeId; - /// Create a value initialized to zero. /// /// `init` is unsafe because it returns a zeroed-out datum, diff --git a/src/libcore/iter.rs b/src/libcore/iter.rs index e5753f6cc2e78..d30cfc405a1ff 100644 --- a/src/libcore/iter.rs +++ b/src/libcore/iter.rs @@ -67,7 +67,7 @@ use num::{ToPrimitive, Int}; use ops::{Add, Deref, FnMut}; use option::Option; use option::Option::{Some, None}; -use std::kinds::Sized; +use std::marker::Sized; use uint; /// An interface for dealing with "external iterators". 
These types of iterators @@ -142,7 +142,7 @@ pub trait IteratorExt: Iterator + Sized { /// ``` #[inline] #[stable] - fn last(mut self) -> Option< ::Item> { + fn last(mut self) -> Option { let mut last = None; for x in self { last = Some(x); } last @@ -161,7 +161,7 @@ pub trait IteratorExt: Iterator + Sized { /// ``` #[inline] #[stable] - fn nth(&mut self, mut n: uint) -> Option< ::Item> { + fn nth(&mut self, mut n: uint) -> Option { for x in *self { if n == 0 { return Some(x) } n -= 1; @@ -186,7 +186,7 @@ pub trait IteratorExt: Iterator + Sized { #[inline] #[stable] fn chain(self, other: U) -> Chain where - U: Iterator::Item>, + U: Iterator, { Chain{a: self, b: other, flag: false} } @@ -228,8 +228,8 @@ pub trait IteratorExt: Iterator + Sized { /// ``` #[inline] #[stable] - fn map(self, f: F) -> Map< ::Item, B, Self, F> where - F: FnMut(::Item) -> B, + fn map(self, f: F) -> Map where + F: FnMut(Self::Item) -> B, { Map{iter: self, f: f} } @@ -248,8 +248,8 @@ pub trait IteratorExt: Iterator + Sized { /// ``` #[inline] #[stable] - fn filter

(self, predicate: P) -> Filter< ::Item, Self, P> where - P: FnMut(&::Item) -> bool, + fn filter

(self, predicate: P) -> Filter where + P: FnMut(&Self::Item) -> bool, { Filter{iter: self, predicate: predicate} } @@ -268,8 +268,8 @@ pub trait IteratorExt: Iterator + Sized { /// ``` #[inline] #[stable] - fn filter_map(self, f: F) -> FilterMap< ::Item, B, Self, F> where - F: FnMut(::Item) -> Option, + fn filter_map(self, f: F) -> FilterMap where + F: FnMut(Self::Item) -> Option, { FilterMap { iter: self, f: f } } @@ -312,7 +312,7 @@ pub trait IteratorExt: Iterator + Sized { /// ``` #[inline] #[stable] - fn peekable(self) -> Peekable< ::Item, Self> { + fn peekable(self) -> Peekable { Peekable{iter: self, peeked: None} } @@ -332,8 +332,8 @@ pub trait IteratorExt: Iterator + Sized { /// ``` #[inline] #[stable] - fn skip_while

(self, predicate: P) -> SkipWhile< ::Item, Self, P> where - P: FnMut(&::Item) -> bool, + fn skip_while

(self, predicate: P) -> SkipWhile where + P: FnMut(&Self::Item) -> bool, { SkipWhile{iter: self, flag: false, predicate: predicate} } @@ -353,8 +353,8 @@ pub trait IteratorExt: Iterator + Sized { /// ``` #[inline] #[stable] - fn take_while

(self, predicate: P) -> TakeWhile< ::Item, Self, P> where - P: FnMut(&::Item) -> bool, + fn take_while

(self, predicate: P) -> TakeWhile where + P: FnMut(&Self::Item) -> bool, { TakeWhile{iter: self, flag: false, predicate: predicate} } @@ -422,8 +422,8 @@ pub trait IteratorExt: Iterator + Sized { self, initial_state: St, f: F, - ) -> Scan< ::Item, B, Self, St, F> where - F: FnMut(&mut St, ::Item) -> Option, + ) -> Scan where + F: FnMut(&mut St, Self::Item) -> Option, { Scan{iter: self, f: f, state: initial_state} } @@ -448,9 +448,9 @@ pub trait IteratorExt: Iterator + Sized { /// ``` #[inline] #[stable] - fn flat_map(self, f: F) -> FlatMap< ::Item, B, Self, U, F> where + fn flat_map(self, f: F) -> FlatMap where U: Iterator, - F: FnMut(::Item) -> U, + F: FnMut(Self::Item) -> U, { FlatMap{iter: self, f: f, frontiter: None, backiter: None } } @@ -508,8 +508,8 @@ pub trait IteratorExt: Iterator + Sized { /// ``` #[inline] #[stable] - fn inspect(self, f: F) -> Inspect< ::Item, Self, F> where - F: FnMut(&::Item), + fn inspect(self, f: F) -> Inspect where + F: FnMut(&Self::Item), { Inspect{iter: self, f: f} } @@ -546,7 +546,7 @@ pub trait IteratorExt: Iterator + Sized { /// ``` #[inline] #[stable] - fn collect::Item>>(self) -> B { + fn collect>(self) -> B { FromIterator::from_iter(self) } @@ -563,8 +563,8 @@ pub trait IteratorExt: Iterator + Sized { /// ``` #[unstable = "recently added as part of collections reform"] fn partition(mut self, mut f: F) -> (B, B) where - B: Default + Extend< ::Item>, - F: FnMut(&::Item) -> bool + B: Default + Extend, + F: FnMut(&Self::Item) -> bool { let mut left: B = Default::default(); let mut right: B = Default::default(); @@ -592,7 +592,7 @@ pub trait IteratorExt: Iterator + Sized { #[inline] #[stable] fn fold(mut self, init: B, mut f: F) -> B where - F: FnMut(B, ::Item) -> B, + F: FnMut(B, Self::Item) -> B, { let mut accum = init; for x in self { @@ -612,7 +612,7 @@ pub trait IteratorExt: Iterator + Sized { /// ``` #[inline] #[stable] - fn all(mut self, mut f: F) -> bool where F: FnMut(::Item) -> bool { + fn all(mut self, mut f: F) -> bool where F: FnMut(Self::Item) -> bool { for x in self { if !f(x) { return false; } } true } @@ -630,7 +630,7 @@ pub trait IteratorExt: Iterator + Sized { /// ``` #[inline] #[stable] - fn any(&mut self, mut f: F) -> bool where F: FnMut(::Item) -> bool { + fn any(&mut self, mut f: F) -> bool where F: FnMut(Self::Item) -> bool { for x in *self { if f(x) { return true; } } false } @@ -640,8 +640,8 @@ pub trait IteratorExt: Iterator + Sized { /// Does not consume the iterator past the first found element. #[inline] #[stable] - fn find

(&mut self, mut predicate: P) -> Option< ::Item> where - P: FnMut(&::Item) -> bool, + fn find

(&mut self, mut predicate: P) -> Option where + P: FnMut(&Self::Item) -> bool, { for x in *self { if predicate(&x) { return Some(x) } @@ -653,7 +653,7 @@ pub trait IteratorExt: Iterator + Sized { #[inline] #[stable] fn position

(&mut self, mut predicate: P) -> Option where - P: FnMut(::Item) -> bool, + P: FnMut(Self::Item) -> bool, { let mut i = 0; for x in *self { @@ -671,7 +671,7 @@ pub trait IteratorExt: Iterator + Sized { #[inline] #[stable] fn rposition

(&mut self, mut predicate: P) -> Option where - P: FnMut(::Item) -> bool, + P: FnMut(Self::Item) -> bool, Self: ExactSizeIterator + DoubleEndedIterator { let len = self.len(); @@ -693,8 +693,7 @@ pub trait IteratorExt: Iterator + Sized { /// ``` #[inline] #[stable] - fn max(self) -> Option< ::Item> where - ::Item: Ord + fn max(self) -> Option where Self::Item: Ord { self.fold(None, |max, x| { match max { @@ -714,8 +713,7 @@ pub trait IteratorExt: Iterator + Sized { /// ``` #[inline] #[stable] - fn min(self) -> Option< ::Item> where - ::Item: Ord + fn min(self) -> Option where Self::Item: Ord { self.fold(None, |min, x| { match min { @@ -759,8 +757,7 @@ pub trait IteratorExt: Iterator + Sized { /// assert!(v.iter().min_max() == MinMax(&1, &1)); /// ``` #[unstable = "return type may change"] - fn min_max(mut self) -> MinMaxResult< ::Item> where - ::Item: Ord + fn min_max(mut self) -> MinMaxResult where Self::Item: Ord { let (mut min, mut max) = match self.next() { None => return NoElements, @@ -817,10 +814,10 @@ pub trait IteratorExt: Iterator + Sized { /// ``` #[inline] #[unstable = "may want to produce an Ordering directly; see #15311"] - fn max_by(self, mut f: F) -> Option< ::Item> where - F: FnMut(&::Item) -> B, + fn max_by(self, mut f: F) -> Option where + F: FnMut(&Self::Item) -> B, { - self.fold(None, |max: Option<(::Item, B)>, x| { + self.fold(None, |max: Option<(Self::Item, B)>, x| { let x_val = f(&x); match max { None => Some((x, x_val)), @@ -846,10 +843,10 @@ pub trait IteratorExt: Iterator + Sized { /// ``` #[inline] #[unstable = "may want to produce an Ordering directly; see #15311"] - fn min_by(self, mut f: F) -> Option< ::Item> where - F: FnMut(&::Item) -> B, + fn min_by(self, mut f: F) -> Option where + F: FnMut(&Self::Item) -> B, { - self.fold(None, |min: Option<(::Item, B)>, x| { + self.fold(None, |min: Option<(Self::Item, B)>, x| { let x_val = f(&x); match min { None => Some((x, x_val)), @@ -968,7 +965,7 @@ impl IteratorExt for I where I: Iterator {} #[stable] pub trait DoubleEndedIterator: Iterator { /// Yield an element from the end of the range, returning `None` if the range is empty. 
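// The iterator hunks above replace explicit element-type parameters and the
// awkward `<Self as Iterator>::Item` spelling with plain `Self::Item`, so
// adapters like `Map` no longer need a redundant element parameter. A sketch
// of the resulting shape with a toy adapter (`MyMap` is illustrative, not the
// libcore `Map`):
struct MyMap<I, F> {
    iter: I,
    f: F,
}

impl<B, I, F> Iterator for MyMap<I, F>
where
    I: Iterator,
    F: FnMut(I::Item) -> B, // the element type comes from the associated type
{
    type Item = B;

    fn next(&mut self) -> Option<B> {
        self.iter.next().map(&mut self.f)
    }
}

fn main() {
    let doubled: Vec<i32> = MyMap { iter: vec![1, 2, 3].into_iter(), f: |x| x * 2 }.collect();
    assert_eq!(doubled, [2, 4, 6]);
}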
- fn next_back(&mut self) -> Option< ::Item>; + fn next_back(&mut self) -> Option; } /// An object implementing random access indexing by `uint` @@ -984,7 +981,7 @@ pub trait RandomAccessIterator: Iterator { fn indexable(&self) -> uint; /// Return an element at an index, or `None` if the index is out of bounds - fn idx(&mut self, index: uint) -> Option< ::Item>; + fn idx(&mut self, index: uint) -> Option; } /// An iterator that knows its exact length @@ -1015,14 +1012,14 @@ pub trait ExactSizeIterator: Iterator { impl ExactSizeIterator for Enumerate where I: ExactSizeIterator {} #[stable] impl ExactSizeIterator for Inspect where - I: ExactSizeIterator + Iterator, + I: ExactSizeIterator, F: FnMut(&A), {} #[stable] impl ExactSizeIterator for Rev where I: ExactSizeIterator + DoubleEndedIterator {} #[stable] impl ExactSizeIterator for Map where - I: ExactSizeIterator + Iterator, + I: ExactSizeIterator, F: FnMut(A) -> B, {} #[stable] @@ -1041,7 +1038,7 @@ impl Iterator for Rev where I: DoubleEndedIterator { type Item = ::Item; #[inline] - fn next(&mut self) -> Option< ::Item> { self.iter.next_back() } + fn next(&mut self) -> Option<::Item> { self.iter.next_back() } #[inline] fn size_hint(&self) -> (uint, Option) { self.iter.size_hint() } } @@ -1049,7 +1046,7 @@ impl Iterator for Rev where I: DoubleEndedIterator { #[stable] impl DoubleEndedIterator for Rev where I: DoubleEndedIterator { #[inline] - fn next_back(&mut self) -> Option< ::Item> { self.iter.next() } + fn next_back(&mut self) -> Option<::Item> { self.iter.next() } } #[experimental = "trait is experimental"] @@ -1057,7 +1054,7 @@ impl RandomAccessIterator for Rev where I: DoubleEndedIterator + RandomAcc #[inline] fn indexable(&self) -> uint { self.iter.indexable() } #[inline] - fn idx(&mut self, index: uint) -> Option< ::Item> { + fn idx(&mut self, index: uint) -> Option<::Item> { let amt = self.indexable(); self.iter.idx(amt - index - 1) } @@ -1075,7 +1072,7 @@ impl<'a, I> Iterator for ByRef<'a, I> where I: 'a + Iterator { type Item = ::Item; #[inline] - fn next(&mut self) -> Option< ::Item> { self.iter.next() } + fn next(&mut self) -> Option<::Item> { self.iter.next() } #[inline] fn size_hint(&self) -> (uint, Option) { self.iter.size_hint() } } @@ -1083,7 +1080,7 @@ impl<'a, I> Iterator for ByRef<'a, I> where I: 'a + Iterator { #[stable] impl<'a, I> DoubleEndedIterator for ByRef<'a, I> where I: 'a + DoubleEndedIterator { #[inline] - fn next_back(&mut self) -> Option< ::Item> { self.iter.next_back() } + fn next_back(&mut self) -> Option<::Item> { self.iter.next_back() } } /// A trait for iterators over elements which can be added together @@ -1244,7 +1241,7 @@ impl Iterator for Cloned where impl DoubleEndedIterator for Cloned where T: Clone, D: Deref, - I: DoubleEndedIterator + Iterator, + I: DoubleEndedIterator, { fn next_back(&mut self) -> Option { self.it.next_back().cloned() @@ -1255,7 +1252,7 @@ impl DoubleEndedIterator for Cloned where impl ExactSizeIterator for Cloned where T: Clone, D: Deref, - I: ExactSizeIterator + Iterator, + I: ExactSizeIterator, {} /// An iterator that repeats endlessly @@ -1272,7 +1269,7 @@ impl Iterator for Cycle where I: Clone + Iterator { type Item = ::Item; #[inline] - fn next(&mut self) -> Option< ::Item> { + fn next(&mut self) -> Option<::Item> { match self.iter.next() { None => { self.iter = self.orig.clone(); self.iter.next() } y => y @@ -1304,7 +1301,7 @@ impl RandomAccessIterator for Cycle where } #[inline] - fn idx(&mut self, index: uint) -> Option< ::Item> { + fn idx(&mut self, index: uint) 
-> Option<::Item> { let liter = self.iter.indexable(); let lorig = self.orig.indexable(); if lorig == 0 { @@ -1363,8 +1360,8 @@ impl Iterator for Chain where A: Iterator, B: Iterator DoubleEndedIterator for Chain where - A: DoubleEndedIterator + Iterator, - B: DoubleEndedIterator + Iterator, + A: DoubleEndedIterator, + B: DoubleEndedIterator, { #[inline] fn next_back(&mut self) -> Option { @@ -1377,8 +1374,8 @@ impl DoubleEndedIterator for Chain where #[experimental = "trait is experimental"] impl RandomAccessIterator for Chain where - A: RandomAccessIterator + Iterator, - B: RandomAccessIterator + Iterator, + A: RandomAccessIterator, + B: RandomAccessIterator, { #[inline] fn indexable(&self) -> uint { @@ -1444,8 +1441,8 @@ impl Iterator for Zip where #[stable] impl DoubleEndedIterator for Zip where - A: ExactSizeIterator + Iterator + DoubleEndedIterator, - B: ExactSizeIterator + Iterator + DoubleEndedIterator, + A: DoubleEndedIterator + ExactSizeIterator, + B: DoubleEndedIterator + ExactSizeIterator, { #[inline] fn next_back(&mut self) -> Option<(T, U)> { @@ -1469,8 +1466,8 @@ impl DoubleEndedIterator for Zip where #[experimental = "trait is experimental"] impl RandomAccessIterator for Zip where - A: RandomAccessIterator + Iterator, - B: RandomAccessIterator + Iterator, + A: RandomAccessIterator, + B: RandomAccessIterator, { #[inline] fn indexable(&self) -> uint { @@ -1539,7 +1536,7 @@ impl Iterator for Map where I: Iterator, F: FnMu #[stable] impl DoubleEndedIterator for Map where - I: DoubleEndedIterator + Iterator, + I: DoubleEndedIterator, F: FnMut(A) -> B, { #[inline] @@ -1551,7 +1548,7 @@ impl DoubleEndedIterator for Map where #[experimental = "trait is experimental"] impl RandomAccessIterator for Map where - I: RandomAccessIterator + Iterator, + I: RandomAccessIterator, F: FnMut(A) -> B, { #[inline] @@ -1613,7 +1610,7 @@ impl Iterator for Filter where I: Iterator, P: FnMut(& #[stable] impl DoubleEndedIterator for Filter where - I: DoubleEndedIterator + Iterator, + I: DoubleEndedIterator, P: FnMut(&A) -> bool, { #[inline] @@ -1676,7 +1673,7 @@ impl Iterator for FilterMap where #[stable] impl DoubleEndedIterator for FilterMap where - I: DoubleEndedIterator + Iterator, + I: DoubleEndedIterator, F: FnMut(A) -> Option, { #[inline] @@ -1925,7 +1922,7 @@ impl Iterator for Skip where I: Iterator { type Item = ::Item; #[inline] - fn next(&mut self) -> Option< ::Item> { + fn next(&mut self) -> Option<::Item> { let mut next = self.iter.next(); if self.n == 0 { next @@ -1972,7 +1969,7 @@ impl RandomAccessIterator for Skip where I: RandomAccessIterator{ } #[inline] - fn idx(&mut self, index: uint) -> Option< ::Item> { + fn idx(&mut self, index: uint) -> Option<::Item> { if index >= self.indexable() { None } else { @@ -1995,7 +1992,7 @@ impl Iterator for Take where I: Iterator{ type Item = ::Item; #[inline] - fn next(&mut self) -> Option< ::Item> { + fn next(&mut self) -> Option<::Item> { if self.n != 0 { self.n -= 1; self.iter.next() @@ -2027,7 +2024,7 @@ impl RandomAccessIterator for Take where I: RandomAccessIterator{ } #[inline] - fn idx(&mut self, index: uint) -> Option< ::Item> { + fn idx(&mut self, index: uint) -> Option<::Item> { if index >= self.n { None } else { @@ -2153,8 +2150,8 @@ impl Iterator for FlatMap where #[stable] impl DoubleEndedIterator for FlatMap where - I: DoubleEndedIterator + Iterator, - U: DoubleEndedIterator + Iterator, + I: DoubleEndedIterator, + U: DoubleEndedIterator, F: FnMut(A) -> U, { #[inline] @@ -2189,7 +2186,7 @@ impl Iterator for Fuse where I: Iterator { 
type Item = ::Item; #[inline] - fn next(&mut self) -> Option< ::Item> { + fn next(&mut self) -> Option<::Item> { if self.done { None } else { @@ -2216,7 +2213,7 @@ impl Iterator for Fuse where I: Iterator { #[stable] impl DoubleEndedIterator for Fuse where I: DoubleEndedIterator { #[inline] - fn next_back(&mut self) -> Option< ::Item> { + fn next_back(&mut self) -> Option<::Item> { if self.done { None } else { @@ -2240,7 +2237,7 @@ impl RandomAccessIterator for Fuse where I: RandomAccessIterator { } #[inline] - fn idx(&mut self, index: uint) -> Option< ::Item> { + fn idx(&mut self, index: uint) -> Option<::Item> { self.iter.idx(index) } } @@ -2308,7 +2305,7 @@ impl Iterator for Inspect where I: Iterator, F: FnMut( #[stable] impl DoubleEndedIterator for Inspect where - I: DoubleEndedIterator + Iterator, + I: DoubleEndedIterator, F: FnMut(&A), { #[inline] @@ -2320,7 +2317,7 @@ impl DoubleEndedIterator for Inspect where #[experimental = "trait is experimental"] impl RandomAccessIterator for Inspect where - I: RandomAccessIterator + Iterator, + I: RandomAccessIterator, F: FnMut(&A), { #[inline] diff --git a/src/libcore/kinds.rs b/src/libcore/kinds.rs deleted file mode 100644 index 5d69938fccff7..0000000000000 --- a/src/libcore/kinds.rs +++ /dev/null @@ -1,298 +0,0 @@ -// Copyright 2012 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -//! Primitive traits representing basic 'kinds' of types -//! -//! Rust types can be classified in various useful ways according to -//! intrinsic properties of the type. These classifications, often called -//! 'kinds', are represented as traits. -//! -//! They cannot be implemented by user code, but are instead implemented -//! by the compiler automatically for the types to which they apply. - -/// Types able to be transferred across task boundaries. -#[lang="send"] -pub unsafe trait Send : 'static { - // empty. -} - -/// Types with a constant size known at compile-time. -#[lang="sized"] -pub trait Sized { - // Empty. -} - -/// Types that can be copied by simply copying bits (i.e. `memcpy`). -#[lang="copy"] -pub trait Copy { - // Empty. -} - -/// Types that can be safely shared between tasks when aliased. -/// -/// The precise definition is: a type `T` is `Sync` if `&T` is -/// thread-safe. In other words, there is no possibility of data races -/// when passing `&T` references between tasks. -/// -/// As one would expect, primitive types like `u8` and `f64` are all -/// `Sync`, and so are simple aggregate types containing them (like -/// tuples, structs and enums). More instances of basic `Sync` types -/// include "immutable" types like `&T` and those with simple -/// inherited mutability, such as `Box`, `Vec` and most other -/// collection types. (Generic parameters need to be `Sync` for their -/// container to be `Sync`.) -/// -/// A somewhat surprising consequence of the definition is `&mut T` is -/// `Sync` (if `T` is `Sync`) even though it seems that it might -/// provide unsynchronised mutation. The trick is a mutable reference -/// stored in an aliasable reference (that is, `& &mut T`) becomes -/// read-only, as if it were a `& &T`, hence there is no risk of a data -/// race. 
-/// -/// Types that are not `Sync` are those that have "interior -/// mutability" in a non-thread-safe way, such as `Cell` and `RefCell` -/// in `std::cell`. These types allow for mutation of their contents -/// even when in an immutable, aliasable slot, e.g. the contents of -/// `&Cell` can be `.set`, and do not ensure data races are -/// impossible, hence they cannot be `Sync`. A higher level example -/// of a non-`Sync` type is the reference counted pointer -/// `std::rc::Rc`, because any reference `&Rc` can clone a new -/// reference, which modifies the reference counts in a non-atomic -/// way. -/// -/// For cases when one does need thread-safe interior mutability, -/// types like the atomics in `std::sync` and `Mutex` & `RWLock` in -/// the `sync` crate do ensure that any mutation cannot cause data -/// races. Hence these types are `Sync`. -/// -/// Users writing their own types with interior mutability (or anything -/// else that is not thread-safe) should use the `NoSync` marker type -/// (from `std::kinds::marker`) to ensure that the compiler doesn't -/// consider the user-defined type to be `Sync`. Any types with -/// interior mutability must also use the `std::cell::UnsafeCell` wrapper -/// around the value(s) which can be mutated when behind a `&` -/// reference; not doing this is undefined behaviour (for example, -/// `transmute`-ing from `&T` to `&mut T` is illegal). -#[lang="sync"] -pub unsafe trait Sync { - // Empty -} - -/// Marker types are special types that are used with unsafe code to -/// inform the compiler of special constraints. Marker types should -/// only be needed when you are creating an abstraction that is -/// implemented using unsafe code. In that case, you may want to embed -/// some of the marker types below into your type. -pub mod marker { - use super::{Copy,Sized}; - use clone::Clone; - - /// A marker type whose type parameter `T` is considered to be - /// covariant with respect to the type itself. This is (typically) - /// used to indicate that an instance of the type `T` is being stored - /// into memory and read from, even though that may not be apparent. - /// - /// For more information about variance, refer to this Wikipedia - /// article . - /// - /// *Note:* It is very unusual to have to add a covariant constraint. - /// If you are not sure, you probably want to use `InvariantType`. - /// - /// # Example - /// - /// Given a struct `S` that includes a type parameter `T` - /// but does not actually *reference* that type parameter: - /// - /// ```ignore - /// use std::mem; - /// - /// struct S { x: *() } - /// fn get(s: &S) -> T { - /// unsafe { - /// let x: *T = mem::transmute(s.x); - /// *x - /// } - /// } - /// ``` - /// - /// The type system would currently infer that the value of - /// the type parameter `T` is irrelevant, and hence a `S` is - /// a subtype of `S>` (or, for that matter, `S` for - /// any `U`). But this is incorrect because `get()` converts the - /// `*()` into a `*T` and reads from it. Therefore, we should include the - /// a marker field `CovariantType` to inform the type checker that - /// `S` is a subtype of `S` if `T` is a subtype of `U` - /// (for example, `S<&'static int>` is a subtype of `S<&'a int>` - /// for some lifetime `'a`, but not the other way around). 
- #[lang="covariant_type"] - #[derive(PartialEq, Eq, PartialOrd, Ord)] - pub struct CovariantType; - - impl Copy for CovariantType {} - impl Clone for CovariantType { - fn clone(&self) -> CovariantType { *self } - } - - /// A marker type whose type parameter `T` is considered to be - /// contravariant with respect to the type itself. This is (typically) - /// used to indicate that an instance of the type `T` will be consumed - /// (but not read from), even though that may not be apparent. - /// - /// For more information about variance, refer to this Wikipedia - /// article . - /// - /// *Note:* It is very unusual to have to add a contravariant constraint. - /// If you are not sure, you probably want to use `InvariantType`. - /// - /// # Example - /// - /// Given a struct `S` that includes a type parameter `T` - /// but does not actually *reference* that type parameter: - /// - /// ``` - /// use std::mem; - /// - /// struct S { x: *const () } - /// fn get(s: &S, v: T) { - /// unsafe { - /// let x: fn(T) = mem::transmute(s.x); - /// x(v) - /// } - /// } - /// ``` - /// - /// The type system would currently infer that the value of - /// the type parameter `T` is irrelevant, and hence a `S` is - /// a subtype of `S>` (or, for that matter, `S` for - /// any `U`). But this is incorrect because `get()` converts the - /// `*()` into a `fn(T)` and then passes a value of type `T` to it. - /// - /// Supplying a `ContravariantType` marker would correct the - /// problem, because it would mark `S` so that `S` is only a - /// subtype of `S` if `U` is a subtype of `T`; given that the - /// function requires arguments of type `T`, it must also accept - /// arguments of type `U`, hence such a conversion is safe. - #[lang="contravariant_type"] - #[derive(PartialEq, Eq, PartialOrd, Ord)] - pub struct ContravariantType; - - impl Copy for ContravariantType {} - impl Clone for ContravariantType { - fn clone(&self) -> ContravariantType { *self } - } - - /// A marker type whose type parameter `T` is considered to be - /// invariant with respect to the type itself. This is (typically) - /// used to indicate that instances of the type `T` may be read or - /// written, even though that may not be apparent. - /// - /// For more information about variance, refer to this Wikipedia - /// article . - /// - /// # Example - /// - /// The Cell type is an example which uses unsafe code to achieve - /// "interior" mutability: - /// - /// ``` - /// pub struct Cell { value: T } - /// # fn main() {} - /// ``` - /// - /// The type system would infer that `value` is only read here and - /// never written, but in fact `Cell` uses unsafe code to achieve - /// interior mutability. - #[lang="invariant_type"] - #[derive(PartialEq, Eq, PartialOrd, Ord)] - pub struct InvariantType; - - impl Copy for InvariantType {} - impl Clone for InvariantType { - fn clone(&self) -> InvariantType { *self } - } - - /// As `CovariantType`, but for lifetime parameters. Using - /// `CovariantLifetime<'a>` indicates that it is ok to substitute - /// a *longer* lifetime for `'a` than the one you originally - /// started with (e.g., you could convert any lifetime `'foo` to - /// `'static`). You almost certainly want `ContravariantLifetime` - /// instead, or possibly `InvariantLifetime`. The only case where - /// it would be appropriate is that you have a (type-casted, and - /// hence hidden from the type system) function pointer with a - /// signature like `fn(&'a T)` (and no other uses of `'a`). 
In - /// this case, it is ok to substitute a larger lifetime for `'a` - /// (e.g., `fn(&'static T)`), because the function is only - /// becoming more selective in terms of what it accepts as - /// argument. - /// - /// For more information about variance, refer to this Wikipedia - /// article . - #[lang="covariant_lifetime"] - #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] - pub struct CovariantLifetime<'a>; - - /// As `ContravariantType`, but for lifetime parameters. Using - /// `ContravariantLifetime<'a>` indicates that it is ok to - /// substitute a *shorter* lifetime for `'a` than the one you - /// originally started with (e.g., you could convert `'static` to - /// any lifetime `'foo`). This is appropriate for cases where you - /// have an unsafe pointer that is actually a pointer into some - /// memory with lifetime `'a`, and thus you want to limit the - /// lifetime of your data structure to `'a`. An example of where - /// this is used is the iterator for vectors. - /// - /// For more information about variance, refer to this Wikipedia - /// article . - #[lang="contravariant_lifetime"] - #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] - pub struct ContravariantLifetime<'a>; - - /// As `InvariantType`, but for lifetime parameters. Using - /// `InvariantLifetime<'a>` indicates that it is not ok to - /// substitute any other lifetime for `'a` besides its original - /// value. This is appropriate for cases where you have an unsafe - /// pointer that is actually a pointer into memory with lifetime `'a`, - /// and this pointer is itself stored in an inherently mutable - /// location (such as a `Cell`). - #[lang="invariant_lifetime"] - #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] - pub struct InvariantLifetime<'a>; - - /// A type which is considered "not sendable", meaning that it cannot - /// be safely sent between tasks, even if it is owned. This is - /// typically embedded in other types, such as `Gc`, to ensure that - /// their instances remain thread-local. - #[lang="no_send_bound"] - #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] - pub struct NoSend; - - /// A type which is considered "not POD", meaning that it is not - /// implicitly copyable. This is typically embedded in other types to - /// ensure that they are never copied, even if they lack a destructor. - #[lang="no_copy_bound"] - #[derive(Clone, PartialEq, Eq, PartialOrd, Ord)] - #[allow(missing_copy_implementations)] - pub struct NoCopy; - - /// A type which is considered "not sync", meaning that - /// its contents are not threadsafe, hence they cannot be - /// shared between tasks. - #[lang="no_sync_bound"] - #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] - pub struct NoSync; - - /// A type which is considered managed by the GC. This is typically - /// embedded in other types. 
- #[lang="managed_bound"] - #[derive(Clone, PartialEq, Eq, PartialOrd, Ord)] - #[allow(missing_copy_implementations)] - pub struct Managed; -} - diff --git a/src/libcore/lib.rs b/src/libcore/lib.rs index aff0065c52744..a7e3b61b0d42b 100644 --- a/src/libcore/lib.rs +++ b/src/libcore/lib.rs @@ -56,29 +56,25 @@ html_playground_url = "http://play.rust-lang.org/")] #![no_std] -#![allow(unknown_features, raw_pointer_deriving)] -#![feature(globs, intrinsics, lang_items, macro_rules, phase)] +#![allow(unknown_features, raw_pointer_derive)] +#![feature(intrinsics, lang_items)] #![feature(simd, unsafe_destructor, slicing_syntax)] -#![feature(default_type_params, unboxed_closures, associated_types)] +#![feature(unboxed_closures)] #![deny(missing_docs)] -#[cfg_attr(stage0, macro_escape)] -#[cfg_attr(not(stage0), macro_use)] +#[macro_use] mod macros; #[path = "num/float_macros.rs"] -#[cfg_attr(stage0, macro_escape)] -#[cfg_attr(not(stage0), macro_use)] +#[macro_use] mod float_macros; #[path = "num/int_macros.rs"] -#[cfg_attr(stage0, macro_escape)] -#[cfg_attr(not(stage0), macro_use)] +#[macro_use] mod int_macros; #[path = "num/uint_macros.rs"] -#[cfg_attr(stage0, macro_escape)] -#[cfg_attr(not(stage0), macro_use)] +#[macro_use] mod uint_macros; #[path = "num/int.rs"] pub mod int; @@ -111,7 +107,7 @@ pub mod ptr; /* Core language traits */ -pub mod kinds; +pub mod marker; pub mod ops; pub mod cmp; pub mod clone; @@ -150,7 +146,9 @@ mod core { mod std { pub use clone; pub use cmp; - pub use kinds; + #[cfg(stage0)] + pub use marker as kinds; + pub use marker; pub use option; pub use fmt; pub use hash; diff --git a/src/libcore/macros.rs b/src/libcore/macros.rs index fcd93ad4a02ae..bfe88fff22fb1 100644 --- a/src/libcore/macros.rs +++ b/src/libcore/macros.rs @@ -83,7 +83,7 @@ macro_rules! assert_eq { if !((*left_val == *right_val) && (*right_val == *left_val)) { panic!("assertion failed: `(left == right) && (right == left)` \ - (left: `{}`, right: `{}`)", *left_val, *right_val) + (left: `{:?}`, right: `{:?}`)", *left_val, *right_val) } } } @@ -142,16 +142,9 @@ macro_rules! debug_assert_eq { ($($arg:tt)*) => (if cfg!(not(ndebug)) { assert_eq!($($arg)*); }) } -#[cfg(stage0)] -#[macro_export] -macro_rules! try { - ($e:expr) => (match $e { Ok(e) => e, Err(e) => return Err(e) }) -} - /// Short circuiting evaluation on Err /// /// `libstd` contains a more general `try!` macro that uses `FromError`. -#[cfg(not(stage0))] #[macro_export] macro_rules! try { ($e:expr) => ({ @@ -189,8 +182,8 @@ macro_rules! writeln { ($dst:expr, $fmt:expr) => ( write!($dst, concat!($fmt, "\n")) ); - ($dst:expr, $fmt:expr, $($arg:expr),*) => ( - write!($dst, concat!($fmt, "\n"), $($arg,)*) + ($dst:expr, $fmt:expr, $($arg:tt)*) => ( + write!($dst, concat!($fmt, "\n"), $($arg)*) ); } diff --git a/src/libcore/marker.rs b/src/libcore/marker.rs new file mode 100644 index 0000000000000..d400cb47cbff6 --- /dev/null +++ b/src/libcore/marker.rs @@ -0,0 +1,314 @@ +// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! Primitive traits and marker types representing basic 'kinds' of types. +//! +//! Rust types can be classified in various useful ways according to +//! intrinsic properties of the type. These classifications, often called +//! 
'kinds', are represented as traits. +//! +//! They cannot be implemented by user code, but are instead implemented +//! by the compiler automatically for the types to which they apply. +//! +//! Marker types are special types that are used with unsafe code to +//! inform the compiler of special constraints. Marker types should +//! only be needed when you are creating an abstraction that is +//! implemented using unsafe code. In that case, you may want to embed +//! some of the marker types below into your type. + +#![stable] + +use clone::Clone; + +/// Types able to be transferred across task boundaries. +#[unstable = "will be overhauled with new lifetime rules; see RFC 458"] +#[lang="send"] +pub unsafe trait Send: 'static { + // empty. +} + +/// Types with a constant size known at compile-time. +#[stable] +#[lang="sized"] +pub trait Sized { + // Empty. +} + +/// Types that can be copied by simply copying bits (i.e. `memcpy`). +#[stable] +#[lang="copy"] +pub trait Copy { + // Empty. +} + +/// Types that can be safely shared between tasks when aliased. +/// +/// The precise definition is: a type `T` is `Sync` if `&T` is +/// thread-safe. In other words, there is no possibility of data races +/// when passing `&T` references between tasks. +/// +/// As one would expect, primitive types like `u8` and `f64` are all +/// `Sync`, and so are simple aggregate types containing them (like +/// tuples, structs and enums). More instances of basic `Sync` types +/// include "immutable" types like `&T` and those with simple +/// inherited mutability, such as `Box`, `Vec` and most other +/// collection types. (Generic parameters need to be `Sync` for their +/// container to be `Sync`.) +/// +/// A somewhat surprising consequence of the definition is `&mut T` is +/// `Sync` (if `T` is `Sync`) even though it seems that it might +/// provide unsynchronised mutation. The trick is a mutable reference +/// stored in an aliasable reference (that is, `& &mut T`) becomes +/// read-only, as if it were a `& &T`, hence there is no risk of a data +/// race. +/// +/// Types that are not `Sync` are those that have "interior +/// mutability" in a non-thread-safe way, such as `Cell` and `RefCell` +/// in `std::cell`. These types allow for mutation of their contents +/// even when in an immutable, aliasable slot, e.g. the contents of +/// `&Cell` can be `.set`, and do not ensure data races are +/// impossible, hence they cannot be `Sync`. A higher level example +/// of a non-`Sync` type is the reference counted pointer +/// `std::rc::Rc`, because any reference `&Rc` can clone a new +/// reference, which modifies the reference counts in a non-atomic +/// way. +/// +/// For cases when one does need thread-safe interior mutability, +/// types like the atomics in `std::sync` and `Mutex` & `RWLock` in +/// the `sync` crate do ensure that any mutation cannot cause data +/// races. Hence these types are `Sync`. +/// +/// Users writing their own types with interior mutability (or anything +/// else that is not thread-safe) should use the `NoSync` marker type +/// (from `std::marker`) to ensure that the compiler doesn't +/// consider the user-defined type to be `Sync`. Any types with +/// interior mutability must also use the `std::cell::UnsafeCell` wrapper +/// around the value(s) which can be mutated when behind a `&` +/// reference; not doing this is undefined behaviour (for example, +/// `transmute`-ing from `&T` to `&mut T` is illegal). 
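The `& &mut T` point in the paragraph above is easy to check; a minimal sketch in present-day syntax (illustrative only, not part of the patch):

```
fn main() {
    let mut x = 5;
    let unique = &mut x;             // exclusive, mutable access to `x`
    let shared: &&mut i32 = &unique; // `& &mut T`: the outer borrow is shared
    // Reading through the shared alias is allowed...
    println!("{}", **shared);
    // ...but `**shared += 1;` would be rejected: a `&mut T` reached through a
    // `&` is demoted to read-only access, which is why no data race can occur.
}
```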
+#[unstable = "will be overhauled with new lifetime rules; see RFC 458"] +#[lang="sync"] +pub unsafe trait Sync { + // Empty +} + + +/// A marker type whose type parameter `T` is considered to be +/// covariant with respect to the type itself. This is (typically) +/// used to indicate that an instance of the type `T` is being stored +/// into memory and read from, even though that may not be apparent. +/// +/// For more information about variance, refer to this Wikipedia +/// article . +/// +/// *Note:* It is very unusual to have to add a covariant constraint. +/// If you are not sure, you probably want to use `InvariantType`. +/// +/// # Example +/// +/// Given a struct `S` that includes a type parameter `T` +/// but does not actually *reference* that type parameter: +/// +/// ```ignore +/// use std::mem; +/// +/// struct S { x: *() } +/// fn get(s: &S) -> T { +/// unsafe { +/// let x: *T = mem::transmute(s.x); +/// *x +/// } +/// } +/// ``` +/// +/// The type system would currently infer that the value of +/// the type parameter `T` is irrelevant, and hence a `S` is +/// a subtype of `S>` (or, for that matter, `S` for +/// any `U`). But this is incorrect because `get()` converts the +/// `*()` into a `*T` and reads from it. Therefore, we should include the +/// a marker field `CovariantType` to inform the type checker that +/// `S` is a subtype of `S` if `T` is a subtype of `U` +/// (for example, `S<&'static int>` is a subtype of `S<&'a int>` +/// for some lifetime `'a`, but not the other way around). +#[unstable = "likely to change with new variance strategy"] +#[lang="covariant_type"] +#[derive(PartialEq, Eq, PartialOrd, Ord)] +pub struct CovariantType; + +impl Copy for CovariantType {} +impl Clone for CovariantType { + fn clone(&self) -> CovariantType { *self } +} + +/// A marker type whose type parameter `T` is considered to be +/// contravariant with respect to the type itself. This is (typically) +/// used to indicate that an instance of the type `T` will be consumed +/// (but not read from), even though that may not be apparent. +/// +/// For more information about variance, refer to this Wikipedia +/// article . +/// +/// *Note:* It is very unusual to have to add a contravariant constraint. +/// If you are not sure, you probably want to use `InvariantType`. +/// +/// # Example +/// +/// Given a struct `S` that includes a type parameter `T` +/// but does not actually *reference* that type parameter: +/// +/// ``` +/// use std::mem; +/// +/// struct S { x: *const () } +/// fn get(s: &S, v: T) { +/// unsafe { +/// let x: fn(T) = mem::transmute(s.x); +/// x(v) +/// } +/// } +/// ``` +/// +/// The type system would currently infer that the value of +/// the type parameter `T` is irrelevant, and hence a `S` is +/// a subtype of `S>` (or, for that matter, `S` for +/// any `U`). But this is incorrect because `get()` converts the +/// `*()` into a `fn(T)` and then passes a value of type `T` to it. +/// +/// Supplying a `ContravariantType` marker would correct the +/// problem, because it would mark `S` so that `S` is only a +/// subtype of `S` if `U` is a subtype of `T`; given that the +/// function requires arguments of type `T`, it must also accept +/// arguments of type `U`, hence such a conversion is safe. 
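These variance marker types were later folded into the single `std::marker::PhantomData` type, but the usage pattern the documentation describes survives unchanged: a zero-sized marker field ties a raw pointer to the type and lifetime it secretly points at, as in the vector iterator mentioned above. A sketch under that assumption, with illustrative names:

```
use std::marker::PhantomData;

/// A read-only view built from a borrowed slice; the raw pointer alone says
/// nothing about `'a` or `T`, so the marker field carries that information.
struct RawView<'a, T> {
    ptr: *const T,
    len: usize,
    _marker: PhantomData<&'a T>,
}

impl<'a, T> RawView<'a, T> {
    fn new(s: &'a [T]) -> RawView<'a, T> {
        RawView { ptr: s.as_ptr(), len: s.len(), _marker: PhantomData }
    }

    fn first(&self) -> Option<&'a T> {
        // Sound because `ptr` came from a slice that stays borrowed for `'a`.
        if self.len == 0 { None } else { Some(unsafe { &*self.ptr }) }
    }
}

fn main() {
    let data = [10, 20, 30];
    assert_eq!(RawView::new(&data).first(), Some(&10));
}
```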
+#[unstable = "likely to change with new variance strategy"] +#[lang="contravariant_type"] +#[derive(PartialEq, Eq, PartialOrd, Ord)] +pub struct ContravariantType; + +impl Copy for ContravariantType {} +impl Clone for ContravariantType { + fn clone(&self) -> ContravariantType { *self } +} + +/// A marker type whose type parameter `T` is considered to be +/// invariant with respect to the type itself. This is (typically) +/// used to indicate that instances of the type `T` may be read or +/// written, even though that may not be apparent. +/// +/// For more information about variance, refer to this Wikipedia +/// article . +/// +/// # Example +/// +/// The Cell type is an example which uses unsafe code to achieve +/// "interior" mutability: +/// +/// ``` +/// pub struct Cell { value: T } +/// # fn main() {} +/// ``` +/// +/// The type system would infer that `value` is only read here and +/// never written, but in fact `Cell` uses unsafe code to achieve +/// interior mutability. +#[unstable = "likely to change with new variance strategy"] +#[lang="invariant_type"] +#[derive(PartialEq, Eq, PartialOrd, Ord)] +pub struct InvariantType; + +#[unstable = "likely to change with new variance strategy"] +impl Copy for InvariantType {} +#[unstable = "likely to change with new variance strategy"] +impl Clone for InvariantType { + fn clone(&self) -> InvariantType { *self } +} + +/// As `CovariantType`, but for lifetime parameters. Using +/// `CovariantLifetime<'a>` indicates that it is ok to substitute +/// a *longer* lifetime for `'a` than the one you originally +/// started with (e.g., you could convert any lifetime `'foo` to +/// `'static`). You almost certainly want `ContravariantLifetime` +/// instead, or possibly `InvariantLifetime`. The only case where +/// it would be appropriate is that you have a (type-casted, and +/// hence hidden from the type system) function pointer with a +/// signature like `fn(&'a T)` (and no other uses of `'a`). In +/// this case, it is ok to substitute a larger lifetime for `'a` +/// (e.g., `fn(&'static T)`), because the function is only +/// becoming more selective in terms of what it accepts as +/// argument. +/// +/// For more information about variance, refer to this Wikipedia +/// article . +#[unstable = "likely to change with new variance strategy"] +#[lang="covariant_lifetime"] +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] +pub struct CovariantLifetime<'a>; + +/// As `ContravariantType`, but for lifetime parameters. Using +/// `ContravariantLifetime<'a>` indicates that it is ok to +/// substitute a *shorter* lifetime for `'a` than the one you +/// originally started with (e.g., you could convert `'static` to +/// any lifetime `'foo`). This is appropriate for cases where you +/// have an unsafe pointer that is actually a pointer into some +/// memory with lifetime `'a`, and thus you want to limit the +/// lifetime of your data structure to `'a`. An example of where +/// this is used is the iterator for vectors. +/// +/// For more information about variance, refer to this Wikipedia +/// article . +#[unstable = "likely to change with new variance strategy"] +#[lang="contravariant_lifetime"] +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] +pub struct ContravariantLifetime<'a>; + +/// As `InvariantType`, but for lifetime parameters. Using +/// `InvariantLifetime<'a>` indicates that it is not ok to +/// substitute any other lifetime for `'a` besides its original +/// value. 
This is appropriate for cases where you have an unsafe +/// pointer that is actually a pointer into memory with lifetime `'a`, +/// and this pointer is itself stored in an inherently mutable +/// location (such as a `Cell`). +#[unstable = "likely to change with new variance strategy"] +#[lang="invariant_lifetime"] +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] +pub struct InvariantLifetime<'a>; + +/// A type which is considered "not sendable", meaning that it cannot +/// be safely sent between tasks, even if it is owned. This is +/// typically embedded in other types, such as `Gc`, to ensure that +/// their instances remain thread-local. +#[unstable = "likely to change with new variance strategy"] +#[lang="no_send_bound"] +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] +pub struct NoSend; + +/// A type which is considered "not POD", meaning that it is not +/// implicitly copyable. This is typically embedded in other types to +/// ensure that they are never copied, even if they lack a destructor. +#[unstable = "likely to change with new variance strategy"] +#[lang="no_copy_bound"] +#[derive(Clone, PartialEq, Eq, PartialOrd, Ord)] +#[allow(missing_copy_implementations)] +pub struct NoCopy; + +/// A type which is considered "not sync", meaning that +/// its contents are not threadsafe, hence they cannot be +/// shared between tasks. +#[unstable = "likely to change with new variance strategy"] +#[lang="no_sync_bound"] +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] +pub struct NoSync; + +/// A type which is considered managed by the GC. This is typically +/// embedded in other types. +#[unstable = "likely to change with new variance strategy"] +#[lang="managed_bound"] +#[derive(Clone, PartialEq, Eq, PartialOrd, Ord)] +#[allow(missing_copy_implementations)] +pub struct Managed; diff --git a/src/libcore/mem.rs b/src/libcore/mem.rs index c60569161213f..8438c9b206ee7 100644 --- a/src/libcore/mem.rs +++ b/src/libcore/mem.rs @@ -15,7 +15,7 @@ #![stable] -use kinds::Sized; +use marker::Sized; use intrinsics; use ptr; diff --git a/src/libcore/num/mod.rs b/src/libcore/num/mod.rs index 192d6063f6bbc..490d8111f463a 100644 --- a/src/libcore/num/mod.rs +++ b/src/libcore/num/mod.rs @@ -21,10 +21,10 @@ use cmp::{PartialEq, Eq}; use cmp::{PartialOrd, Ord}; use intrinsics; use iter::IteratorExt; -use kinds::Copy; +use marker::Copy; use mem::size_of; use ops::{Add, Sub, Mul, Div, Rem, Neg}; -use ops::{Not, BitAnd, BitOr, BitXor, Shl, Shr}; +use ops::{Not, BitAnd, BitOr, BitXor, Shl, Shr, Index}; use option::Option; use option::Option::{Some, None}; use str::{FromStr, StrExt}; @@ -992,7 +992,7 @@ impl_to_primitive_float! { f64 } /// A generic trait for converting a number to a value. #[experimental = "trait is likely to be removed"] -pub trait FromPrimitive : ::kinds::Sized { +pub trait FromPrimitive : ::marker::Sized { /// Convert an `int` to return an optional value of this type. If the /// value cannot be represented by this value, the `None` is returned. #[inline] @@ -1577,7 +1577,7 @@ macro_rules! from_str_radix_float_impl { }; // Parse the exponent as decimal integer - let src = src[offset..]; + let src = src.index(&(offset..)); let (is_positive, exp) = match src.slice_shift_char() { Some(('-', src)) => (false, src.parse::()), Some(('+', src)) => (true, src.parse::()), diff --git a/src/libcore/ops.rs b/src/libcore/ops.rs index 97d94e73bb33a..4debab91739c1 100644 --- a/src/libcore/ops.rs +++ b/src/libcore/ops.rs @@ -51,8 +51,8 @@ //! } //! } //! fn main() { -//! 
println!("{}", Point {x: 1, y: 0} + Point {x: 2, y: 3}); -//! println!("{}", Point {x: 1, y: 0} - Point {x: 2, y: 3}); +//! println!("{:?}", Point {x: 1, y: 0} + Point {x: 2, y: 3}); +//! println!("{:?}", Point {x: 1, y: 0} - Point {x: 2, y: 3}); //! } //! ``` //! @@ -63,7 +63,7 @@ use clone::Clone; use iter::{Step, Iterator,DoubleEndedIterator,ExactSizeIterator}; -use kinds::Sized; +use marker::Sized; use option::Option::{self, Some, None}; /// The `Drop` trait is used to run some code when a value goes out of scope. This @@ -846,105 +846,6 @@ pub trait IndexMut { fn index_mut<'a>(&'a mut self, index: &Index) -> &'a mut Self::Output; } -/// The `Slice` trait is used to specify the functionality of slicing operations -/// like `arr[from..to]` when used in an immutable context. -/// -/// # Example -/// -/// A trivial implementation of `Slice`. When `Foo[..Foo]` happens, it ends up -/// calling `slice_to`, and therefore, `main` prints `Slicing!`. -/// -/// ```ignore -/// use std::ops::Slice; -/// -/// #[derive(Copy)] -/// struct Foo; -/// -/// impl Slice for Foo { -/// fn as_slice_<'a>(&'a self) -> &'a Foo { -/// println!("Slicing!"); -/// self -/// } -/// fn slice_from_or_fail<'a>(&'a self, _from: &Foo) -> &'a Foo { -/// println!("Slicing!"); -/// self -/// } -/// fn slice_to_or_fail<'a>(&'a self, _to: &Foo) -> &'a Foo { -/// println!("Slicing!"); -/// self -/// } -/// fn slice_or_fail<'a>(&'a self, _from: &Foo, _to: &Foo) -> &'a Foo { -/// println!("Slicing!"); -/// self -/// } -/// } -/// -/// fn main() { -/// Foo[..Foo]; -/// } -/// ``` -#[lang="slice"] -pub trait Slice { - /// The method for the slicing operation foo[] - fn as_slice_<'a>(&'a self) -> &'a Result; - /// The method for the slicing operation foo[from..] - fn slice_from_or_fail<'a>(&'a self, from: &Idx) -> &'a Result; - /// The method for the slicing operation foo[..to] - fn slice_to_or_fail<'a>(&'a self, to: &Idx) -> &'a Result; - /// The method for the slicing operation foo[from..to] - fn slice_or_fail<'a>(&'a self, from: &Idx, to: &Idx) -> &'a Result; -} - -/// The `SliceMut` trait is used to specify the functionality of slicing -/// operations like `arr[from..to]`, when used in a mutable context. -/// -/// # Example -/// -/// A trivial implementation of `SliceMut`. When `Foo[Foo..]` happens, it ends up -/// calling `slice_from_mut`, and therefore, `main` prints `Slicing!`. -/// -/// ```ignore -/// use std::ops::SliceMut; -/// -/// #[derive(Copy)] -/// struct Foo; -/// -/// impl SliceMut for Foo { -/// fn as_mut_slice_<'a>(&'a mut self) -> &'a mut Foo { -/// println!("Slicing!"); -/// self -/// } -/// fn slice_from_or_fail_mut<'a>(&'a mut self, _from: &Foo) -> &'a mut Foo { -/// println!("Slicing!"); -/// self -/// } -/// fn slice_to_or_fail_mut<'a>(&'a mut self, _to: &Foo) -> &'a mut Foo { -/// println!("Slicing!"); -/// self -/// } -/// fn slice_or_fail_mut<'a>(&'a mut self, _from: &Foo, _to: &Foo) -> &'a mut Foo { -/// println!("Slicing!"); -/// self -/// } -/// } -/// -/// pub fn main() { -/// Foo[mut Foo..]; -/// } -/// ``` -#[lang="slice_mut"] -pub trait SliceMut { - /// The method for the slicing operation foo[] - fn as_mut_slice_<'a>(&'a mut self) -> &'a mut Result; - /// The method for the slicing operation foo[from..] 
- fn slice_from_or_fail_mut<'a>(&'a mut self, from: &Idx) -> &'a mut Result; - /// The method for the slicing operation foo[..to] - fn slice_to_or_fail_mut<'a>(&'a mut self, to: &Idx) -> &'a mut Result; - /// The method for the slicing operation foo[from..to] - fn slice_or_fail_mut<'a>(&'a mut self, from: &Idx, to: &Idx) -> &'a mut Result; -} - - /// An unbounded range. #[derive(Copy)] #[lang="full_range"] @@ -962,8 +863,6 @@ pub struct Range { pub end: Idx, } -// FIXME(#19391) needs a snapshot -//impl> Iterator for Range { #[unstable = "API still in development"] impl Iterator for Range { type Item = Idx; @@ -1134,7 +1033,7 @@ impl<'a, T: ?Sized> Deref for &'a mut T { pub trait DerefMut: Deref { /// The method called to mutably dereference a value #[stable] - fn deref_mut<'a>(&'a mut self) -> &'a mut ::Target; + fn deref_mut<'a>(&'a mut self) -> &'a mut Self::Target; } #[stable] diff --git a/src/libcore/option.rs b/src/libcore/option.rs index 39d0f024d4d41..272570a0d5bb9 100644 --- a/src/libcore/option.rs +++ b/src/libcore/option.rs @@ -238,7 +238,7 @@ impl Option { /// // First, cast `Option` to `Option<&String>` with `as_ref`, /// // then consume *that* with `map`, leaving `num_as_str` on the stack. /// let num_as_int: Option = num_as_str.as_ref().map(|n| n.len()); - /// println!("still can print num_as_str: {}", num_as_str); + /// println!("still can print num_as_str: {:?}", num_as_str); /// ``` #[inline] #[stable] diff --git a/src/libcore/prelude.rs b/src/libcore/prelude.rs index e88cb73c8a9b7..c3bb9c91557f3 100644 --- a/src/libcore/prelude.rs +++ b/src/libcore/prelude.rs @@ -29,8 +29,8 @@ //! ``` // Reexported core operators -pub use kinds::{Copy, Send, Sized, Sync}; -pub use ops::{Drop, Fn, FnMut, FnOnce}; +pub use marker::{Copy, Send, Sized, Sync}; +pub use ops::{Drop, Fn, FnMut, FnOnce, FullRange}; // Reexported functions pub use iter::range; diff --git a/src/libcore/ptr.rs b/src/libcore/ptr.rs index 0b77f3456b2f5..c35d948165ab8 100644 --- a/src/libcore/ptr.rs +++ b/src/libcore/ptr.rs @@ -92,7 +92,7 @@ use mem; use clone::Clone; use intrinsics; use option::Option::{self, Some, None}; -use kinds::{Send, Sized, Sync}; +use marker::{Send, Sized, Sync}; use cmp::{PartialEq, Eq, Ord, PartialOrd}; use cmp::Ordering::{self, Less, Equal, Greater}; diff --git a/src/libcore/raw.rs b/src/libcore/raw.rs index 5ef6f6b2623aa..1ad6d43f76f09 100644 --- a/src/libcore/raw.rs +++ b/src/libcore/raw.rs @@ -18,7 +18,7 @@ //! //! Their definition should always match the ABI defined in `rustc::back::abi`. -use kinds::Copy; +use marker::Copy; use mem; /// The representation of a Rust slice diff --git a/src/libcore/result.rs b/src/libcore/result.rs index 8e9bf5487e3ed..95ae6ebfb68c3 100644 --- a/src/libcore/result.rs +++ b/src/libcore/result.rs @@ -47,10 +47,10 @@ //! let version = parse_version(&[1, 2, 3, 4]); //! match version { //! Ok(v) => { -//! println!("working with version: {}", v); +//! println!("working with version: {:?}", v); //! } //! Err(e) => { -//! println!("error parsing header: {}", e); +//! println!("error parsing header: {:?}", e); //! } //! } //! 
``` @@ -743,7 +743,7 @@ impl Result { match self { Ok(t) => t, Err(e) => - panic!("called `Result::unwrap()` on an `Err` value: {}", e) + panic!("called `Result::unwrap()` on an `Err` value: {:?}", e) } } } @@ -773,7 +773,7 @@ impl Result { pub fn unwrap_err(self) -> E { match self { Ok(t) => - panic!("called `Result::unwrap_err()` on an `Ok` value: {}", t), + panic!("called `Result::unwrap_err()` on an `Ok` value: {:?}", t), Err(e) => e } } diff --git a/src/libcore/simd.rs b/src/libcore/simd.rs index 66b29bab98c24..1f9aebb91beab 100644 --- a/src/libcore/simd.rs +++ b/src/libcore/simd.rs @@ -25,7 +25,7 @@ //! use std::simd::f32x4; //! let a = f32x4(40.0, 41.0, 42.0, 43.0); //! let b = f32x4(1.0, 1.1, 3.4, 9.8); -//! println!("{}", a + b); +//! println!("{:?}", a + b); //! } //! ``` //! diff --git a/src/libcore/slice.rs b/src/libcore/slice.rs index 093ed0b242f5f..bf2df46537008 100644 --- a/src/libcore/slice.rs +++ b/src/libcore/slice.rs @@ -41,9 +41,9 @@ use cmp::Ordering::{Less, Equal, Greater}; use cmp; use default::Default; use iter::*; -use kinds::Copy; +use marker::Copy; use num::Int; -use ops::{FnMut, self}; +use ops::{FnMut, self, Index}; use option::Option; use option::Option::{None, Some}; use result::Result; @@ -52,7 +52,7 @@ use ptr; use ptr::PtrExt; use mem; use mem::size_of; -use kinds::{Sized, marker}; +use marker::{Sized, self}; use raw::Repr; // Avoid conflicts with *both* the Slice trait (buggy) and the `slice::raw` module. use raw::Slice as RawSlice; @@ -159,7 +159,7 @@ impl SliceExt for [T] { #[inline] fn split_at(&self, mid: uint) -> (&[T], &[T]) { - (self[..mid], self[mid..]) + (self.index(&(0..mid)), self.index(&(mid..))) } #[inline] @@ -236,11 +236,11 @@ impl SliceExt for [T] { } #[inline] - fn tail(&self) -> &[T] { self[1..] } + fn tail(&self) -> &[T] { self.index(&(1..)) } #[inline] fn init(&self) -> &[T] { - self[..self.len() - 1] + self.index(&(0..(self.len() - 1))) } #[inline] @@ -292,17 +292,17 @@ impl SliceExt for [T] { fn as_mut_slice(&mut self) -> &mut [T] { self } fn slice_mut(&mut self, start: uint, end: uint) -> &mut [T] { - ops::SliceMut::slice_or_fail_mut(self, &start, &end) + ops::IndexMut::index_mut(self, &ops::Range { start: start, end: end } ) } #[inline] fn slice_from_mut(&mut self, start: uint) -> &mut [T] { - ops::SliceMut::slice_from_or_fail_mut(self, &start) + ops::IndexMut::index_mut(self, &ops::RangeFrom { start: start } ) } #[inline] fn slice_to_mut(&mut self, end: uint) -> &mut [T] { - ops::SliceMut::slice_to_or_fail_mut(self, &end) + ops::IndexMut::index_mut(self, &ops::RangeTo { end: end } ) } #[inline] @@ -310,8 +310,8 @@ impl SliceExt for [T] { unsafe { let self2: &mut [T] = mem::transmute_copy(&self); - (ops::SliceMut::slice_to_or_fail_mut(self, &mid), - ops::SliceMut::slice_from_or_fail_mut(self2, &mid)) + (ops::IndexMut::index_mut(self, &ops::RangeTo { end: mid } ), + ops::IndexMut::index_mut(self2, &ops::RangeFrom { start: mid } )) } } @@ -443,13 +443,13 @@ impl SliceExt for [T] { #[inline] fn starts_with(&self, needle: &[T]) -> bool where T: PartialEq { let n = needle.len(); - self.len() >= n && needle == self[..n] + self.len() >= n && needle == self.index(&(0..n)) } #[inline] fn ends_with(&self, needle: &[T]) -> bool where T: PartialEq { let (m, n) = (self.len(), needle.len()); - m >= n && needle == self[m-n..] 
+ m >= n && needle == self.index(&((m-n)..)) } #[unstable] @@ -551,62 +551,79 @@ impl ops::IndexMut for [T] { } } -impl ops::Slice for [T] { +impl ops::Index> for [T] { + type Output = [T]; #[inline] - fn as_slice_<'a>(&'a self) -> &'a [T] { - self - } - - #[inline] - fn slice_from_or_fail<'a>(&'a self, start: &uint) -> &'a [T] { - self.slice_or_fail(start, &self.len()) - } - - #[inline] - fn slice_to_or_fail<'a>(&'a self, end: &uint) -> &'a [T] { - self.slice_or_fail(&0, end) - } - #[inline] - fn slice_or_fail<'a>(&'a self, start: &uint, end: &uint) -> &'a [T] { - assert!(*start <= *end); - assert!(*end <= self.len()); + fn index(&self, index: &ops::Range) -> &[T] { + assert!(index.start <= index.end); + assert!(index.end <= self.len()); unsafe { transmute(RawSlice { - data: self.as_ptr().offset(*start as int), - len: (*end - *start) + data: self.as_ptr().offset(index.start as int), + len: index.end - index.start }) } } } - -impl ops::SliceMut for [T] { +impl ops::Index> for [T] { + type Output = [T]; #[inline] - fn as_mut_slice_<'a>(&'a mut self) -> &'a mut [T] { - self + fn index(&self, index: &ops::RangeTo) -> &[T] { + self.index(&ops::Range{ start: 0, end: index.end }) } - +} +impl ops::Index> for [T] { + type Output = [T]; #[inline] - fn slice_from_or_fail_mut<'a>(&'a mut self, start: &uint) -> &'a mut [T] { - let len = &self.len(); - self.slice_or_fail_mut(start, len) + fn index(&self, index: &ops::RangeFrom) -> &[T] { + self.index(&ops::Range{ start: index.start, end: self.len() }) } - +} +impl ops::Index for [T] { + type Output = [T]; #[inline] - fn slice_to_or_fail_mut<'a>(&'a mut self, end: &uint) -> &'a mut [T] { - self.slice_or_fail_mut(&0, end) + fn index(&self, _index: &ops::FullRange) -> &[T] { + self } +} + +impl ops::IndexMut> for [T] { + type Output = [T]; #[inline] - fn slice_or_fail_mut<'a>(&'a mut self, start: &uint, end: &uint) -> &'a mut [T] { - assert!(*start <= *end); - assert!(*end <= self.len()); + fn index_mut(&mut self, index: &ops::Range) -> &mut [T] { + assert!(index.start <= index.end); + assert!(index.end <= self.len()); unsafe { transmute(RawSlice { - data: self.as_ptr().offset(*start as int), - len: (*end - *start) + data: self.as_ptr().offset(index.start as int), + len: index.end - index.start }) } } } +impl ops::IndexMut> for [T] { + type Output = [T]; + #[inline] + fn index_mut(&mut self, index: &ops::RangeTo) -> &mut [T] { + self.index_mut(&ops::Range{ start: 0, end: index.end }) + } +} +impl ops::IndexMut> for [T] { + type Output = [T]; + #[inline] + fn index_mut(&mut self, index: &ops::RangeFrom) -> &mut [T] { + let len = self.len(); + self.index_mut(&ops::Range{ start: index.start, end: len }) + } +} +impl ops::IndexMut for [T] { + type Output = [T]; + #[inline] + fn index_mut(&mut self, _index: &ops::FullRange) -> &mut [T] { + self + } +} + //////////////////////////////////////////////////////////////////////////////// // Common traits @@ -716,7 +733,7 @@ macro_rules! iterator { } macro_rules! 
make_slice { - ($t: ty -> $result: ty: $start: expr, $end: expr) => {{ + ($t: ty => $result: ty: $start: expr, $end: expr) => {{ let diff = $end as uint - $start as uint; let len = if mem::size_of::() == 0 { diff @@ -738,21 +755,38 @@ pub struct Iter<'a, T: 'a> { } #[experimental] -impl<'a, T> ops::Slice for Iter<'a, T> { - fn as_slice_(&self) -> &[T] { - self.as_slice() +impl<'a, T> ops::Index> for Iter<'a, T> { + type Output = [T]; + #[inline] + fn index(&self, index: &ops::Range) -> &[T] { + self.as_slice().index(index) } - fn slice_from_or_fail<'b>(&'b self, from: &uint) -> &'b [T] { - use ops::Slice; - self.as_slice().slice_from_or_fail(from) +} + +#[experimental] +impl<'a, T> ops::Index> for Iter<'a, T> { + type Output = [T]; + #[inline] + fn index(&self, index: &ops::RangeTo) -> &[T] { + self.as_slice().index(index) } - fn slice_to_or_fail<'b>(&'b self, to: &uint) -> &'b [T] { - use ops::Slice; - self.as_slice().slice_to_or_fail(to) +} + +#[experimental] +impl<'a, T> ops::Index> for Iter<'a, T> { + type Output = [T]; + #[inline] + fn index(&self, index: &ops::RangeFrom) -> &[T] { + self.as_slice().index(index) } - fn slice_or_fail<'b>(&'b self, from: &uint, to: &uint) -> &'b [T] { - use ops::Slice; - self.as_slice().slice_or_fail(from, to) +} + +#[experimental] +impl<'a, T> ops::Index for Iter<'a, T> { + type Output = [T]; + #[inline] + fn index(&self, _index: &ops::FullRange) -> &[T] { + self.as_slice() } } @@ -763,7 +797,7 @@ impl<'a, T> Iter<'a, T> { /// iterator can continue to be used while this exists. #[experimental] pub fn as_slice(&self) -> &'a [T] { - make_slice!(T -> &'a [T]: self.ptr, self.end) + make_slice!(T => &'a [T]: self.ptr, self.end) } } @@ -812,44 +846,74 @@ pub struct IterMut<'a, T: 'a> { marker: marker::ContravariantLifetime<'a>, } + #[experimental] -impl<'a, T> ops::Slice for IterMut<'a, T> { - fn as_slice_<'b>(&'b self) -> &'b [T] { - make_slice!(T -> &'b [T]: self.ptr, self.end) +impl<'a, T> ops::Index> for IterMut<'a, T> { + type Output = [T]; + #[inline] + fn index(&self, index: &ops::Range) -> &[T] { + self.index(&ops::FullRange).index(index) } - fn slice_from_or_fail<'b>(&'b self, from: &uint) -> &'b [T] { - use ops::Slice; - self.as_slice_().slice_from_or_fail(from) +} +#[experimental] +impl<'a, T> ops::Index> for IterMut<'a, T> { + type Output = [T]; + #[inline] + fn index(&self, index: &ops::RangeTo) -> &[T] { + self.index(&ops::FullRange).index(index) } - fn slice_to_or_fail<'b>(&'b self, to: &uint) -> &'b [T] { - use ops::Slice; - self.as_slice_().slice_to_or_fail(to) +} +#[experimental] +impl<'a, T> ops::Index> for IterMut<'a, T> { + type Output = [T]; + #[inline] + fn index(&self, index: &ops::RangeFrom) -> &[T] { + self.index(&ops::FullRange).index(index) } - fn slice_or_fail<'b>(&'b self, from: &uint, to: &uint) -> &'b [T] { - use ops::Slice; - self.as_slice_().slice_or_fail(from, to) +} +#[experimental] +impl<'a, T> ops::Index for IterMut<'a, T> { + type Output = [T]; + #[inline] + fn index(&self, _index: &ops::FullRange) -> &[T] { + make_slice!(T => &[T]: self.ptr, self.end) } } #[experimental] -impl<'a, T> ops::SliceMut for IterMut<'a, T> { - fn as_mut_slice_<'b>(&'b mut self) -> &'b mut [T] { - make_slice!(T -> &'b mut [T]: self.ptr, self.end) +impl<'a, T> ops::IndexMut> for IterMut<'a, T> { + type Output = [T]; + #[inline] + fn index_mut(&mut self, index: &ops::Range) -> &mut [T] { + self.index_mut(&ops::FullRange).index_mut(index) } - fn slice_from_or_fail_mut<'b>(&'b mut self, from: &uint) -> &'b mut [T] { - use ops::SliceMut; - 
self.as_mut_slice_().slice_from_or_fail_mut(from) +} +#[experimental] +impl<'a, T> ops::IndexMut> for IterMut<'a, T> { + type Output = [T]; + #[inline] + fn index_mut(&mut self, index: &ops::RangeTo) -> &mut [T] { + self.index_mut(&ops::FullRange).index_mut(index) } - fn slice_to_or_fail_mut<'b>(&'b mut self, to: &uint) -> &'b mut [T] { - use ops::SliceMut; - self.as_mut_slice_().slice_to_or_fail_mut(to) +} +#[experimental] +impl<'a, T> ops::IndexMut> for IterMut<'a, T> { + type Output = [T]; + #[inline] + fn index_mut(&mut self, index: &ops::RangeFrom) -> &mut [T] { + self.index_mut(&ops::FullRange).index_mut(index) } - fn slice_or_fail_mut<'b>(&'b mut self, from: &uint, to: &uint) -> &'b mut [T] { - use ops::SliceMut; - self.as_mut_slice_().slice_or_fail_mut(from, to) +} +#[experimental] +impl<'a, T> ops::IndexMut for IterMut<'a, T> { + type Output = [T]; + #[inline] + fn index_mut(&mut self, _index: &ops::FullRange) -> &mut [T] { + make_slice!(T => &mut [T]: self.ptr, self.end) } } + impl<'a, T> IterMut<'a, T> { /// View the underlying data as a subslice of the original data. /// @@ -859,7 +923,7 @@ impl<'a, T> IterMut<'a, T> { /// restricted lifetimes that do not consume the iterator. #[experimental] pub fn into_slice(self) -> &'a mut [T] { - make_slice!(T -> &'a mut [T]: self.ptr, self.end) + make_slice!(T => &'a mut [T]: self.ptr, self.end) } } @@ -873,7 +937,7 @@ impl<'a, T> ExactSizeIterator for IterMut<'a, T> {} trait SplitIter: DoubleEndedIterator { /// Mark the underlying iterator as complete, extracting the remaining /// portion of the slice. - fn finish(&mut self) -> Option< ::Item>; + fn finish(&mut self) -> Option; } /// An iterator over subslices separated by elements that match a predicate @@ -908,8 +972,8 @@ impl<'a, T, P> Iterator for Split<'a, T, P> where P: FnMut(&T) -> bool { match self.v.iter().position(|x| (self.pred)(x)) { None => self.finish(), Some(idx) => { - let ret = Some(self.v[..idx]); - self.v = self.v[idx + 1..]; + let ret = Some(self.v.index(&(0..idx))); + self.v = self.v.index(&((idx + 1)..)); ret } } @@ -934,8 +998,8 @@ impl<'a, T, P> DoubleEndedIterator for Split<'a, T, P> where P: FnMut(&T) -> boo match self.v.iter().rposition(|x| (self.pred)(x)) { None => self.finish(), Some(idx) => { - let ret = Some(self.v[idx + 1..]); - self.v = self.v[..idx]; + let ret = Some(self.v.index(&((idx + 1)..))); + self.v = self.v.index(&(0..idx)); ret } } @@ -1038,7 +1102,7 @@ struct GenericSplitN { invert: bool } -impl> Iterator for GenericSplitN { +impl> Iterator for GenericSplitN { type Item = T; #[inline] @@ -1131,8 +1195,8 @@ impl<'a, T> Iterator for Windows<'a, T> { if self.size > self.v.len() { None } else { - let ret = Some(self.v[..self.size]); - self.v = self.v[1..]; + let ret = Some(self.v.index(&(0..self.size))); + self.v = self.v.index(&(1..)); ret } } @@ -1219,7 +1283,7 @@ impl<'a, T> RandomAccessIterator for Chunks<'a, T> { let mut hi = lo + self.size; if hi < lo || hi > self.v.len() { hi = self.v.len(); } - Some(self.v[lo..hi]) + Some(self.v.index(&(lo..hi))) } else { None } diff --git a/src/libcore/str/mod.rs b/src/libcore/str/mod.rs index b0a1fa0b1b4c6..3f8ce000e214c 100644 --- a/src/libcore/str/mod.rs +++ b/src/libcore/str/mod.rs @@ -23,10 +23,10 @@ use default::Default; use iter::range; use iter::ExactSizeIterator; use iter::{Map, Iterator, IteratorExt, DoubleEndedIterator}; -use kinds::Sized; +use marker::Sized; use mem; use num::Int; -use ops::{Fn, FnMut}; +use ops::{Fn, FnMut, Index}; use option::Option::{self, None, Some}; use ptr::PtrExt; use 
raw::{Repr, Slice}; @@ -142,7 +142,7 @@ Section: Creating a string */ /// Errors which can occur when attempting to interpret a byte slice as a `str`. -#[derive(Copy, Eq, PartialEq, Clone)] +#[derive(Copy, Eq, PartialEq, Clone, Show)] #[unstable = "error enumeration recently added and definitions may be refined"] pub enum Utf8Error { /// An invalid byte was detected at the byte offset given. @@ -580,7 +580,7 @@ impl NaiveSearcher { fn next(&mut self, haystack: &[u8], needle: &[u8]) -> Option<(uint, uint)> { while self.position + needle.len() <= haystack.len() { - if haystack[self.position .. self.position + needle.len()] == needle { + if haystack.index(&(self.position .. self.position + needle.len())) == needle { let match_pos = self.position; self.position += needle.len(); // add 1 for all matches return Some((match_pos, match_pos + needle.len())); @@ -701,10 +701,10 @@ impl TwoWaySearcher { // // What's going on is we have some critical factorization (u, v) of the // needle, and we want to determine whether u is a suffix of - // v[..period]. If it is, we use "Algorithm CP1". Otherwise we use + // v.index(&(0..period)). If it is, we use "Algorithm CP1". Otherwise we use // "Algorithm CP2", which is optimized for when the period of the needle // is large. - if needle[..crit_pos] == needle[period.. period + crit_pos] { + if needle.index(&(0..crit_pos)) == needle.index(&(period.. period + crit_pos)) { TwoWaySearcher { crit_pos: crit_pos, period: period, @@ -1118,25 +1118,32 @@ mod traits { } } - impl ops::Slice for str { + impl ops::Index> for str { + type Output = str; #[inline] - fn as_slice_<'a>(&'a self) -> &'a str { - self + fn index(&self, index: &ops::Range) -> &str { + self.slice(index.start, index.end) } - + } + impl ops::Index> for str { + type Output = str; #[inline] - fn slice_from_or_fail<'a>(&'a self, from: &uint) -> &'a str { - self.slice_from(*from) + fn index(&self, index: &ops::RangeTo) -> &str { + self.slice_to(index.end) } - + } + impl ops::Index> for str { + type Output = str; #[inline] - fn slice_to_or_fail<'a>(&'a self, to: &uint) -> &'a str { - self.slice_to(*to) + fn index(&self, index: &ops::RangeFrom) -> &str { + self.slice_from(index.start) } - + } + impl ops::Index for str { + type Output = str; #[inline] - fn slice_or_fail<'a>(&'a self, from: &uint, to: &uint) -> &'a str { - self.slice(*from, *to) + fn index(&self, _index: &ops::FullRange) -> &str { + self } } } @@ -1405,13 +1412,13 @@ impl StrExt for str { #[inline] fn starts_with(&self, needle: &str) -> bool { let n = needle.len(); - self.len() >= n && needle.as_bytes() == self.as_bytes()[..n] + self.len() >= n && needle.as_bytes() == self.as_bytes().index(&(0..n)) } #[inline] fn ends_with(&self, needle: &str) -> bool { let (m, n) = (self.len(), needle.len()); - m >= n && needle.as_bytes() == self.as_bytes()[m-n..] + m >= n && needle.as_bytes() == self.as_bytes().index(&((m-n)..)) } #[inline] diff --git a/src/libcore/ty.rs b/src/libcore/ty.rs index f8e03662b00ec..35c1cb09281f4 100644 --- a/src/libcore/ty.rs +++ b/src/libcore/ty.rs @@ -10,4 +10,4 @@ //! Types dealing with unsafe actions. 
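
The hunks above replace the old monolithic slicing trait on `str` and on slice iterators with one `Index` impl per range type, each delegating to the existing `slice`, `slice_to`, and `slice_from` helpers. A minimal sketch of the same pattern on a hypothetical wrapper type (`Ascii` and its field are illustrative, not part of the patch; the range parameters are written out as the era's `ops::Range<uint>` family, which the flattened hunks above elide):

    use std::ops;
    use std::ops::Index;

    // Hypothetical newtype used only to illustrate the per-range-type Index impls.
    struct Ascii { bytes: Vec<u8> }

    impl ops::Index<ops::Range<uint>> for Ascii {
        type Output = [u8];
        #[inline]
        fn index(&self, index: &ops::Range<uint>) -> &[u8] {
            // Delegate to the underlying slice's own Index impl, as the patch
            // does for `str` and `Iter`. RangeTo/RangeFrom impls have the same shape.
            self.bytes.index(index)
        }
    }

    impl ops::Index<ops::FullRange> for Ascii {
        type Output = [u8];
        #[inline]
        fn index(&self, _index: &ops::FullRange) -> &[u8] {
            self.bytes.as_slice()
        }
    }

    fn main() {
        let a = Ascii { bytes: vec![104, 105, 33] }; // "hi!"
        assert_eq!(a.index(&(0u..2)).len(), 2);
        assert_eq!(a.index(&ops::FullRange).len(), 3);
    }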
-use kinds::marker; +use marker; diff --git a/src/libcoretest/any.rs b/src/libcoretest/any.rs index 9b0471bfad936..c0be3a287940a 100644 --- a/src/libcoretest/any.rs +++ b/src/libcoretest/any.rs @@ -56,12 +56,12 @@ fn any_downcast_ref() { match a.downcast_ref::() { Some(&5) => {} - x => panic!("Unexpected value {}", x) + x => panic!("Unexpected value {:?}", x) } match a.downcast_ref::() { None => {} - x => panic!("Unexpected value {}", x) + x => panic!("Unexpected value {:?}", x) } } @@ -79,7 +79,7 @@ fn any_downcast_mut() { assert_eq!(*x, 5u); *x = 612; } - x => panic!("Unexpected value {}", x) + x => panic!("Unexpected value {:?}", x) } match b_r.downcast_mut::() { @@ -87,27 +87,27 @@ fn any_downcast_mut() { assert_eq!(*x, 7u); *x = 413; } - x => panic!("Unexpected value {}", x) + x => panic!("Unexpected value {:?}", x) } match a_r.downcast_mut::() { None => (), - x => panic!("Unexpected value {}", x) + x => panic!("Unexpected value {:?}", x) } match b_r.downcast_mut::() { None => (), - x => panic!("Unexpected value {}", x) + x => panic!("Unexpected value {:?}", x) } match a_r.downcast_mut::() { Some(&612) => {} - x => panic!("Unexpected value {}", x) + x => panic!("Unexpected value {:?}", x) } match b_r.downcast_mut::() { Some(&413) => {} - x => panic!("Unexpected value {}", x) + x => panic!("Unexpected value {:?}", x) } } diff --git a/src/libcoretest/cell.rs b/src/libcoretest/cell.rs index 54da6264bb049..86f34ecd15efe 100644 --- a/src/libcoretest/cell.rs +++ b/src/libcoretest/cell.rs @@ -29,10 +29,10 @@ fn smoketest_cell() { #[test] fn cell_has_sensible_show() { let x = Cell::new("foo bar"); - assert!(format!("{}", x).contains(x.get())); + assert!(format!("{:?}", x).contains(x.get())); x.set("baz qux"); - assert!(format!("{}", x).contains(x.get())); + assert!(format!("{:?}", x).contains(x.get())); } #[test] @@ -40,11 +40,11 @@ fn ref_and_refmut_have_sensible_show() { let refcell = RefCell::new("foo"); let refcell_refmut = refcell.borrow_mut(); - assert!(format!("{}", refcell_refmut).contains("foo")); + assert!(format!("{:?}", refcell_refmut).contains("foo")); drop(refcell_refmut); let refcell_ref = refcell.borrow(); - assert!(format!("{}", refcell_ref).contains("foo")); + assert!(format!("{:?}", refcell_ref).contains("foo")); drop(refcell_ref); } diff --git a/src/libcoretest/char.rs b/src/libcoretest/char.rs index b581cdbd71093..f901e8001767d 100644 --- a/src/libcoretest/char.rs +++ b/src/libcoretest/char.rs @@ -167,7 +167,7 @@ fn test_encode_utf8() { fn check(input: char, expect: &[u8]) { let mut buf = [0u8; 4]; let n = input.encode_utf8(buf.as_mut_slice()).unwrap_or(0); - assert_eq!(buf[..n], expect); + assert_eq!(buf.index(&(0..n)), expect); } check('x', &[0x78]); @@ -181,7 +181,7 @@ fn test_encode_utf16() { fn check(input: char, expect: &[u16]) { let mut buf = [0u16; 2]; let n = input.encode_utf16(buf.as_mut_slice()).unwrap_or(0); - assert_eq!(buf[..n], expect); + assert_eq!(buf.index(&(0..n)), expect); } check('x', &[0x0078]); diff --git a/src/libcoretest/fmt/num.rs b/src/libcoretest/fmt/num.rs index 1e28933becd6e..c259e4cbb686d 100644 --- a/src/libcoretest/fmt/num.rs +++ b/src/libcoretest/fmt/num.rs @@ -26,6 +26,11 @@ fn test_format_int() { assert!(format!("{}", -1i16) == "-1"); assert!(format!("{}", -1i32) == "-1"); assert!(format!("{}", -1i64) == "-1"); + assert!(format!("{:?}", 1i) == "1i"); + assert!(format!("{:?}", 1i8) == "1i8"); + assert!(format!("{:?}", 1i16) == "1i16"); + assert!(format!("{:?}", 1i32) == "1i32"); + assert!(format!("{:?}", 1i64) == "1i64"); 
assert!(format!("{:b}", 1i) == "1"); assert!(format!("{:b}", 1i8) == "1"); assert!(format!("{:b}", 1i16) == "1"); @@ -52,6 +57,11 @@ fn test_format_int() { assert!(format!("{}", 1u16) == "1"); assert!(format!("{}", 1u32) == "1"); assert!(format!("{}", 1u64) == "1"); + assert!(format!("{:?}", 1u) == "1u"); + assert!(format!("{:?}", 1u8) == "1u8"); + assert!(format!("{:?}", 1u16) == "1u16"); + assert!(format!("{:?}", 1u32) == "1u32"); + assert!(format!("{:?}", 1u64) == "1u64"); assert!(format!("{:b}", 1u) == "1"); assert!(format!("{:b}", 1u8) == "1"); assert!(format!("{:b}", 1u16) == "1"); @@ -84,12 +94,14 @@ fn test_format_int() { #[test] fn test_format_int_zero() { assert!(format!("{}", 0i) == "0"); + assert!(format!("{:?}", 0i) == "0i"); assert!(format!("{:b}", 0i) == "0"); assert!(format!("{:o}", 0i) == "0"); assert!(format!("{:x}", 0i) == "0"); assert!(format!("{:X}", 0i) == "0"); assert!(format!("{}", 0u) == "0"); + assert!(format!("{:?}", 0u) == "0u"); assert!(format!("{:b}", 0u) == "0"); assert!(format!("{:o}", 0u) == "0"); assert!(format!("{:x}", 0u) == "0"); @@ -183,6 +195,12 @@ mod uint { b.iter(|| { format!("{:x}", rng.gen::()); }) } + #[bench] + fn format_show(b: &mut Bencher) { + let mut rng = weak_rng(); + b.iter(|| { format!("{:?}", rng.gen::()); }) + } + #[bench] fn format_base_36(b: &mut Bencher) { let mut rng = weak_rng(); @@ -219,6 +237,12 @@ mod int { b.iter(|| { format!("{:x}", rng.gen::()); }) } + #[bench] + fn format_show(b: &mut Bencher) { + let mut rng = weak_rng(); + b.iter(|| { format!("{:?}", rng.gen::()); }) + } + #[bench] fn format_base_36(b: &mut Bencher) { let mut rng = weak_rng(); diff --git a/src/libcoretest/hash/mod.rs b/src/libcoretest/hash/mod.rs index a4bafe754ffdd..63bf9ec331454 100644 --- a/src/libcoretest/hash/mod.rs +++ b/src/libcoretest/hash/mod.rs @@ -7,7 +7,7 @@ // , at your // option. This file may not be copied, modified, or distributed // except according to those terms. 
-use core::kinds::Sized; +use core::marker::Sized; use std::mem; use core::slice::SliceExt; diff --git a/src/libcoretest/iter.rs b/src/libcoretest/iter.rs index 73db72d0313e6..61266a9264944 100644 --- a/src/libcoretest/iter.rs +++ b/src/libcoretest/iter.rs @@ -14,7 +14,6 @@ use core::iter::MinMaxResult::*; use core::num::SignedInt; use core::uint; use core::cmp; -use core::ops::Slice; use test::Bencher; @@ -230,7 +229,7 @@ fn test_inspect() { .collect::>(); assert_eq!(n, xs.len()); - assert_eq!(xs[], ys[]); + assert_eq!(&xs[], &ys[]); } #[test] @@ -281,21 +280,21 @@ fn test_iterator_nth() { fn test_iterator_last() { let v: &[_] = &[0i, 1, 2, 3, 4]; assert_eq!(v.iter().last().unwrap(), &4); - assert_eq!(v[0..1].iter().last().unwrap(), &0); + assert_eq!(v[..1].iter().last().unwrap(), &0); } #[test] fn test_iterator_len() { let v: &[_] = &[0i, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]; - assert_eq!(v[0..4].iter().count(), 4); - assert_eq!(v[0..10].iter().count(), 10); + assert_eq!(v[..4].iter().count(), 4); + assert_eq!(v[..10].iter().count(), 10); assert_eq!(v[0..0].iter().count(), 0); } #[test] fn test_iterator_sum() { let v: &[_] = &[0i, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]; - assert_eq!(v[0..4].iter().map(|&x| x).sum(), 6); + assert_eq!(v[..4].iter().map(|&x| x).sum(), 6); assert_eq!(v.iter().map(|&x| x).sum(), 55); assert_eq!(v[0..0].iter().map(|&x| x).sum(), 0); } @@ -374,7 +373,7 @@ fn test_all() { assert!(v.iter().all(|&x| x < 10)); assert!(!v.iter().all(|&x| x % 2 == 0)); assert!(!v.iter().all(|&x| x > 100)); - assert!(v.slice_or_fail(&0, &0).iter().all(|_| panic!())); + assert!(v[0..0].iter().all(|_| panic!())); } #[test] @@ -383,7 +382,7 @@ fn test_any() { assert!(v.iter().any(|&x| x < 10)); assert!(v.iter().any(|&x| x % 2 == 0)); assert!(!v.iter().any(|&x| x > 100)); - assert!(!v.slice_or_fail(&0, &0).iter().any(|_| panic!())); + assert!(!v[0..0].iter().any(|_| panic!())); } #[test] @@ -586,7 +585,7 @@ fn check_randacc_iter(a: T, len: uint) where fn test_double_ended_flat_map() { let u = [0u,1]; let v = [5u,6,7,8]; - let mut it = u.iter().flat_map(|x| v[*x..v.len()].iter()); + let mut it = u.iter().flat_map(|x| v[(*x)..v.len()].iter()); assert_eq!(it.next_back().unwrap(), &8); assert_eq!(it.next().unwrap(), &5); assert_eq!(it.next_back().unwrap(), &7); diff --git a/src/libcoretest/num/mod.rs b/src/libcoretest/num/mod.rs index f86c85f821638..8186a4f0904af 100644 --- a/src/libcoretest/num/mod.rs +++ b/src/libcoretest/num/mod.rs @@ -12,10 +12,9 @@ use core::cmp::PartialEq; use core::fmt::Show; use core::num::{NumCast, cast}; use core::ops::{Add, Sub, Mul, Div, Rem}; -use core::kinds::Copy; +use core::marker::Copy; -#[cfg_attr(stage0, macro_escape)] -#[cfg_attr(not(stage0), macro_use)] +#[macro_use] mod int_macros; mod i8; @@ -24,8 +23,7 @@ mod i32; mod i64; mod int; -#[cfg_attr(stage0, macro_escape)] -#[cfg_attr(not(stage0), macro_use)] +#[macro_use] mod uint_macros; mod u8; diff --git a/src/libcoretest/option.rs b/src/libcoretest/option.rs index 4a459992098a0..1169f91023802 100644 --- a/src/libcoretest/option.rs +++ b/src/libcoretest/option.rs @@ -9,7 +9,7 @@ // except according to those terms. 
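
Two mechanical renames recur throughout these hunks: the `kinds` module becomes `marker` (so `core::kinds::Sized`, `core::kinds::Copy`, and `kinds::marker` all move), and the two-stage `#[cfg_attr(stage0, ...)]` / `#[phase(plugin, link)]` attributes collapse into a single `#[macro_use]`. A sketch of a crate root after the migration (the `dup` helper is illustrative; it only exercises the new `std::marker` path):

    // Was: #[phase(plugin, link)] extern crate log; (plus a #[cfg(stage0)] duplicate).
    #[macro_use]
    extern crate log;

    // Was: use std::kinds::marker; / use core::kinds::{Copy, Sized};
    use std::marker;

    // The marker traits themselves are unchanged, only their path moved.
    fn dup<T: marker::Copy>(x: T) -> (T, T) { (x, x) }

    fn main() {
        assert_eq!(dup(3u), (3, 3));
        info!("marker traits now live in std::marker");
    }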
use core::option::*; -use core::kinds::marker; +use core::marker; use core::mem; use core::clone::Clone; diff --git a/src/libcoretest/result.rs b/src/libcoretest/result.rs index 52ea14dd05dd9..b9403598ec2b2 100644 --- a/src/libcoretest/result.rs +++ b/src/libcoretest/result.rs @@ -95,10 +95,10 @@ pub fn test_fmt_default() { let ok: Result = Ok(100); let err: Result = Err("Err"); - let s = format!("{}", ok); - assert_eq!(s, "Ok(100)"); - let s = format!("{}", err); - assert_eq!(s, "Err(Err)"); + let s = format!("{:?}", ok); + assert_eq!(s, "Ok(100i)"); + let s = format!("{:?}", err); + assert_eq!(s, "Err(\"Err\")"); } #[test] diff --git a/src/libcoretest/slice.rs b/src/libcoretest/slice.rs index 9ef7d6030593a..b714b6a4e417d 100644 --- a/src/libcoretest/slice.rs +++ b/src/libcoretest/slice.rs @@ -43,35 +43,35 @@ fn iterator_to_slice() { { let mut iter = data.iter(); - assert_eq!(iter[], other_data[]); + assert_eq!(&iter[], &other_data[]); iter.next(); - assert_eq!(iter[], other_data[1..]); + assert_eq!(&iter[], &other_data[1..]); iter.next_back(); - assert_eq!(iter[], other_data[1..2]); + assert_eq!(&iter[], &other_data[1..2]); let s = iter.as_slice(); iter.next(); - assert_eq!(s, other_data[1..2]); + assert_eq!(s, &other_data[1..2]); } { let mut iter = data.iter_mut(); - assert_eq!(iter[], other_data[]); + assert_eq!(iter.index(&FullRange), other_data.index(&FullRange)); // mutability: - assert!(iter[mut] == other_data); + assert!(&mut iter[] == other_data); iter.next(); - assert_eq!(iter[], other_data[1..]); - assert!(iter[mut] == other_data[mut 1..]); + assert_eq!(iter.index(&FullRange), other_data.index(&(1..))); + assert!(&mut iter[] == &mut other_data[1..]); iter.next_back(); - assert_eq!(iter[], other_data[1..2]); - assert!(iter[mut] == other_data[mut 1..2]); + assert_eq!(iter.index(&FullRange), other_data.index(&(1..2))); + assert!(&mut iter[] == &mut other_data[1..2]); let s = iter.into_slice(); - assert!(s == other_data[mut 1..2]); + assert!(s == &mut other_data[1..2]); } }} } diff --git a/src/libcoretest/tuple.rs b/src/libcoretest/tuple.rs index c3bc38a6614b8..62eb9f4ad3480 100644 --- a/src/libcoretest/tuple.rs +++ b/src/libcoretest/tuple.rs @@ -59,10 +59,10 @@ fn test_tuple_cmp() { #[test] fn test_show() { - let s = format!("{}", (1i,)); - assert_eq!(s, "(1,)"); - let s = format!("{}", (1i, true)); - assert_eq!(s, "(1, true)"); - let s = format!("{}", (1i, "hi", true)); - assert_eq!(s, "(1, hi, true)"); + let s = format!("{:?}", (1i,)); + assert_eq!(s, "(1i,)"); + let s = format!("{:?}", (1i, true)); + assert_eq!(s, "(1i, true)"); + let s = format!("{:?}", (1i, "hi", true)); + assert_eq!(s, "(1i, \"hi\", true)"); } diff --git a/src/libflate/lib.rs b/src/libflate/lib.rs index 6ac311fe4b646..f38440d86c6e8 100644 --- a/src/libflate/lib.rs +++ b/src/libflate/lib.rs @@ -21,7 +21,6 @@ #![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", html_favicon_url = "http://www.rust-lang.org/favicon.ico", html_root_url = "http://doc.rust-lang.org/nightly/")] -#![feature(unboxed_closures, associated_types)] #[cfg(test)] #[macro_use] extern crate log; diff --git a/src/libfmt_macros/lib.rs b/src/libfmt_macros/lib.rs index 917c6e99992f2..47cc072a636a1 100644 --- a/src/libfmt_macros/lib.rs +++ b/src/libfmt_macros/lib.rs @@ -23,8 +23,7 @@ html_root_url = "http://doc.rust-lang.org/nightly/", html_playground_url = "http://play.rust-lang.org/")] -#![feature(globs, slicing_syntax)] -#![feature(associated_types)] +#![feature(slicing_syntax)] pub use self::Piece::*; pub use 
self::Position::*; @@ -212,11 +211,12 @@ impl<'a> Parser<'a> { self.cur.next(); } Some((_, other)) => { - self.err(format!("expected `{}`, found `{}`", c, other)[]); + self.err(format!("expected `{:?}`, found `{:?}`", c, + other).index(&FullRange)); } None => { - self.err(format!("expected `{}` but string was terminated", - c)[]); + self.err(format!("expected `{:?}` but string was terminated", + c).index(&FullRange)); } } } @@ -239,12 +239,12 @@ impl<'a> Parser<'a> { // we may not consume the character, so clone the iterator match self.cur.clone().next() { Some((pos, '}')) | Some((pos, '{')) => { - return self.input[start..pos]; + return self.input.index(&(start..pos)); } Some(..) => { self.cur.next(); } None => { self.cur.next(); - return self.input[start..self.input.len()]; + return self.input.index(&(start..self.input.len())); } } } @@ -284,7 +284,7 @@ impl<'a> Parser<'a> { flags: 0, precision: CountImplied, width: CountImplied, - ty: self.input[0..0], + ty: self.input.index(&(0..0)), }; if !self.consume(':') { return spec } @@ -393,7 +393,7 @@ impl<'a> Parser<'a> { self.cur.next(); pos } - Some(..) | None => { return self.input[0..0]; } + Some(..) | None => { return self.input.index(&(0..0)); } }; let mut end; loop { @@ -405,7 +405,7 @@ impl<'a> Parser<'a> { None => { end = self.input.len(); break } } } - self.input[start..end] + self.input.index(&(start..end)) } /// Optionally parses an integer at the current position. This doesn't deal diff --git a/src/libgetopts/lib.rs b/src/libgetopts/lib.rs index 18077795e245f..f50e24c6354f1 100644 --- a/src/libgetopts/lib.rs +++ b/src/libgetopts/lib.rs @@ -85,8 +85,7 @@ html_favicon_url = "http://www.rust-lang.org/favicon.ico", html_root_url = "http://doc.rust-lang.org/nightly/", html_playground_url = "http://play.rust-lang.org/")] -#![feature(globs, slicing_syntax)] -#![feature(unboxed_closures)] +#![feature(slicing_syntax)] #![deny(missing_docs)] #[cfg(test)] #[macro_use] extern crate log; @@ -105,7 +104,7 @@ use std::iter::repeat; use std::result; /// Name of an option. Either a string or a single char. -#[derive(Clone, PartialEq, Eq)] +#[derive(Clone, PartialEq, Eq, Show)] pub enum Name { /// A string representing the long name of an option. /// For example: "help" @@ -116,7 +115,7 @@ pub enum Name { } /// Describes whether an option has an argument. -#[derive(Clone, Copy, PartialEq, Eq)] +#[derive(Clone, Copy, PartialEq, Eq, Show)] pub enum HasArg { /// The option requires an argument. Yes, @@ -127,7 +126,7 @@ pub enum HasArg { } /// Describes how often an option may occur. -#[derive(Clone, Copy, PartialEq, Eq)] +#[derive(Clone, Copy, PartialEq, Eq, Show)] pub enum Occur { /// The option occurs once. Req, @@ -138,7 +137,7 @@ pub enum Occur { } /// A description of a possible option. -#[derive(Clone, PartialEq, Eq)] +#[derive(Clone, PartialEq, Eq, Show)] pub struct Opt { /// Name of the option pub name: Name, @@ -152,7 +151,7 @@ pub struct Opt { /// One group of options, e.g., both `-h` and `--help`, along with /// their shared description and properties. -#[derive(Clone, PartialEq, Eq)] +#[derive(Clone, PartialEq, Eq, Show)] pub struct OptGroup { /// Short name of the option, e.g. `h` for a `-h` option pub short_name: String, @@ -169,7 +168,7 @@ pub struct OptGroup { } /// Describes whether an option is given at all or has a value. -#[derive(Clone, PartialEq, Eq)] +#[derive(Clone, PartialEq, Eq, Show)] enum Optval { Val(String), Given, @@ -177,7 +176,7 @@ enum Optval { /// The result of checking command line arguments. 
Contains a vector /// of matches and a vector of free strings. -#[derive(Clone, PartialEq, Eq)] +#[derive(Clone, PartialEq, Eq, Show)] pub struct Matches { /// Options that matched opts: Vec, @@ -190,7 +189,7 @@ pub struct Matches { /// The type returned when the command line does not conform to the /// expected format. Use the `Show` implementation to output detailed /// information. -#[derive(Clone, PartialEq, Eq)] +#[derive(Clone, PartialEq, Eq, Show)] pub enum Fail { /// The option requires an argument but none was passed. ArgumentMissing(String), @@ -205,7 +204,7 @@ pub enum Fail { } /// The type of failure that occurred. -#[derive(Copy, PartialEq, Eq)] +#[derive(Copy, PartialEq, Eq, Show)] #[allow(missing_docs)] pub enum FailType { ArgumentMissing_, @@ -281,7 +280,7 @@ impl OptGroup { impl Matches { fn opt_vals(&self, nm: &str) -> Vec { - match find_opt(self.opts[], Name::from_str(nm)) { + match find_opt(self.opts.index(&FullRange), Name::from_str(nm)) { Some(id) => self.vals[id].clone(), None => panic!("No option '{}' defined", nm) } @@ -309,7 +308,7 @@ impl Matches { /// Returns true if any of several options were matched. pub fn opts_present(&self, names: &[String]) -> bool { for nm in names.iter() { - match find_opt(self.opts.as_slice(), Name::from_str(nm[])) { + match find_opt(self.opts.as_slice(), Name::from_str(nm.index(&FullRange))) { Some(id) if !self.vals[id].is_empty() => return true, _ => (), }; @@ -320,7 +319,7 @@ impl Matches { /// Returns the string argument supplied to one of several matching options or `None`. pub fn opts_str(&self, names: &[String]) -> Option { for nm in names.iter() { - match self.opt_val(nm[]) { + match self.opt_val(nm.index(&FullRange)) { Some(Val(ref s)) => return Some(s.clone()), _ => () } @@ -536,13 +535,13 @@ pub fn opt(short_name: &str, impl Fail { /// Convert a `Fail` enum into an error string. 
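
Much of the getopts churn above is the same substitution applied repeatedly: the old `v[]` and `v[a..b]` sugar is spelled out as calls to the new `Index` trait, `v.index(&FullRange)` and `v.index(&(a..b))`, which also borrow a `String` as `&str` or a `Vec<T>` as `&[T]`. A small sketch of the correspondence (the values are illustrative; `Index` and `FullRange` are assumed importable from `std::ops` as in the patch):

    use std::ops::{Index, FullRange};

    fn main() {
        let owned = "free=value".to_string();
        let args = vec!["-h".to_string(), "--help".to_string()];

        // String -> &str for the whole string, and for a sub-range.
        let whole: &str = owned.index(&FullRange);        // was: owned[]
        let tail:  &str = owned.index(&(5..owned.len())); // was: owned[5..]
        // Vec<T> -> &[T].
        let all: &[String] = args.index(&FullRange);      // was: args[]

        assert_eq!(whole, "free=value");
        assert_eq!(tail, "value");
        assert_eq!(all.len(), 2);
    }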
- #[deprecated="use `Show` (`{}` format specifier)"] + #[deprecated="use `fmt::String` (`{}` format specifier)"] pub fn to_err_msg(self) -> String { self.to_string() } } -impl fmt::Show for Fail { +impl fmt::String for Fail { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { ArgumentMissing(ref nm) => { @@ -585,7 +584,7 @@ pub fn getopts(args: &[String], optgrps: &[OptGroup]) -> Result { while i < l { let cur = args[i].clone(); let curlen = cur.len(); - if !is_arg(cur[]) { + if !is_arg(cur.index(&FullRange)) { free.push(cur); } else if cur == "--" { let mut j = i + 1; @@ -595,7 +594,7 @@ pub fn getopts(args: &[String], optgrps: &[OptGroup]) -> Result { let mut names; let mut i_arg = None; if cur.as_bytes()[1] == b'-' { - let tail = cur[2..curlen]; + let tail = cur.index(&(2..curlen)); let tail_eq: Vec<&str> = tail.split('=').collect(); if tail_eq.len() <= 1 { names = vec!(Long(tail.to_string())); @@ -631,7 +630,7 @@ pub fn getopts(args: &[String], optgrps: &[OptGroup]) -> Result { }; if arg_follows && range.next < curlen { - i_arg = Some(cur[range.next..curlen].to_string()); + i_arg = Some(cur.index(&(range.next..curlen)).to_string()); break; } @@ -650,29 +649,34 @@ pub fn getopts(args: &[String], optgrps: &[OptGroup]) -> Result { if name_pos == names.len() && !i_arg.is_none() { return Err(UnexpectedArgument(nm.to_string())); } - vals[optid].push(Given); + let v = &mut vals[optid]; + v.push(Given); } Maybe => { if !i_arg.is_none() { - vals[optid] - .push(Val((i_arg.clone()) + let v = &mut vals[optid]; + v.push(Val((i_arg.clone()) .unwrap())); } else if name_pos < names.len() || i + 1 == l || - is_arg(args[i + 1][]) { - vals[optid].push(Given); + is_arg(args[i + 1].index(&FullRange)) { + let v = &mut vals[optid]; + v.push(Given); } else { i += 1; - vals[optid].push(Val(args[i].clone())); + let v = &mut vals[optid]; + v.push(Val(args[i].clone())); } } Yes => { if !i_arg.is_none() { - vals[optid].push(Val(i_arg.clone().unwrap())); + let v = &mut vals[optid]; + v.push(Val(i_arg.clone().unwrap())); } else if i + 1 == l { return Err(ArgumentMissing(nm.to_string())); } else { i += 1; - vals[optid].push(Val(args[i].clone())); + let v = &mut vals[optid]; + v.push(Val(args[i].clone())); } } } @@ -717,7 +721,7 @@ pub fn usage(brief: &str, opts: &[OptGroup]) -> String { 0 => {} 1 => { row.push('-'); - row.push_str(short_name[]); + row.push_str(short_name.index(&FullRange)); row.push(' '); } _ => panic!("the short name should only be 1 ascii char long"), @@ -728,7 +732,7 @@ pub fn usage(brief: &str, opts: &[OptGroup]) -> String { 0 => {} _ => { row.push_str("--"); - row.push_str(long_name[]); + row.push_str(long_name.index(&FullRange)); row.push(' '); } } @@ -736,10 +740,10 @@ pub fn usage(brief: &str, opts: &[OptGroup]) -> String { // arg match hasarg { No => {} - Yes => row.push_str(hint[]), + Yes => row.push_str(hint.index(&FullRange)), Maybe => { row.push('['); - row.push_str(hint[]); + row.push_str(hint.index(&FullRange)); row.push(']'); } } @@ -752,7 +756,7 @@ pub fn usage(brief: &str, opts: &[OptGroup]) -> String { row.push(' '); } } else { - row.push_str(desc_sep[]); + row.push_str(desc_sep.index(&FullRange)); } // Normalize desc to contain words separated by one space character @@ -764,14 +768,14 @@ pub fn usage(brief: &str, opts: &[OptGroup]) -> String { // FIXME: #5516 should be graphemes not codepoints let mut desc_rows = Vec::new(); - each_split_within(desc_normalized_whitespace[], 54, |substr| { + each_split_within(desc_normalized_whitespace.index(&FullRange), 54, 
|substr| { desc_rows.push(substr.to_string()); true }); // FIXME: #5516 should be graphemes not codepoints // wrapped description - row.push_str(desc_rows.connect(desc_sep[])[]); + row.push_str(desc_rows.connect(desc_sep.index(&FullRange)).index(&FullRange)); row }); @@ -790,10 +794,10 @@ fn format_option(opt: &OptGroup) -> String { // Use short_name is possible, but fallback to long_name. if opt.short_name.len() > 0 { line.push('-'); - line.push_str(opt.short_name[]); + line.push_str(opt.short_name.index(&FullRange)); } else { line.push_str("--"); - line.push_str(opt.long_name[]); + line.push_str(opt.long_name.index(&FullRange)); } if opt.hasarg != No { @@ -801,7 +805,7 @@ fn format_option(opt: &OptGroup) -> String { if opt.hasarg == Maybe { line.push('['); } - line.push_str(opt.hint[]); + line.push_str(opt.hint.index(&FullRange)); if opt.hasarg == Maybe { line.push(']'); } @@ -823,7 +827,7 @@ pub fn short_usage(program_name: &str, opts: &[OptGroup]) -> String { line.push_str(opts.iter() .map(format_option) .collect::>() - .connect(" ")[]); + .connect(" ").index(&FullRange)); line } @@ -886,9 +890,9 @@ fn each_split_within(ss: &str, lim: uint, mut it: F) -> bool where (B, Cr, UnderLim) => { B } (B, Cr, OverLim) if (i - last_start + 1) > lim => panic!("word starting with {} longer than limit!", - ss[last_start..i + 1]), + ss.index(&(last_start..(i + 1)))), (B, Cr, OverLim) => { - *cont = it(ss[slice_start..last_end]); + *cont = it(ss.index(&(slice_start..last_end))); slice_start = last_start; B } @@ -898,7 +902,7 @@ fn each_split_within(ss: &str, lim: uint, mut it: F) -> bool where } (B, Ws, OverLim) => { last_end = i; - *cont = it(ss[slice_start..last_end]); + *cont = it(ss.index(&(slice_start..last_end))); A } @@ -907,14 +911,14 @@ fn each_split_within(ss: &str, lim: uint, mut it: F) -> bool where B } (C, Cr, OverLim) => { - *cont = it(ss[slice_start..last_end]); + *cont = it(ss.index(&(slice_start..last_end))); slice_start = i; last_start = i; last_end = i; B } (C, Ws, OverLim) => { - *cont = it(ss[slice_start..last_end]); + *cont = it(ss.index(&(slice_start..last_end))); A } (C, Ws, UnderLim) => { diff --git a/src/libgraphviz/lib.rs b/src/libgraphviz/lib.rs index 64cc490f4b163..83bad70e7b117 100644 --- a/src/libgraphviz/lib.rs +++ b/src/libgraphviz/lib.rs @@ -271,8 +271,7 @@ #![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", html_favicon_url = "http://www.rust-lang.org/favicon.ico", html_root_url = "http://doc.rust-lang.org/nightly/")] -#![feature(globs, slicing_syntax)] -#![feature(unboxed_closures)] +#![feature(slicing_syntax)] use self::LabelText::*; @@ -453,7 +452,7 @@ impl<'a> LabelText<'a> { pub fn escape(&self) -> String { match self { &LabelStr(ref s) => s.escape_default(), - &EscStr(ref s) => LabelText::escape_str(s[]), + &EscStr(ref s) => LabelText::escape_str(s.index(&FullRange)), } } @@ -482,7 +481,7 @@ impl<'a> LabelText<'a> { let mut prefix = self.pre_escaped_content().into_owned(); let suffix = suffix.pre_escaped_content(); prefix.push_str(r"\n\n"); - prefix.push_str(suffix[]); + prefix.push_str(suffix.index(&FullRange)); EscStr(prefix.into_cow()) } } @@ -676,7 +675,7 @@ mod tests { impl<'a> Labeller<'a, Node, &'a Edge> for LabelledGraph { fn graph_id(&'a self) -> Id<'a> { - Id::new(self.name[]).unwrap() + Id::new(self.name.index(&FullRange)).unwrap() } fn node_id(&'a self, n: &Node) -> Id<'a> { id_name(n) diff --git a/src/liblibc/lib.rs b/src/liblibc/lib.rs index e3f02146a75f4..e48272b4e09da 100644 --- a/src/liblibc/lib.rs +++ 
b/src/liblibc/lib.rs @@ -8,7 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -#![feature(globs)] #![crate_name = "libc"] #![experimental] #![no_std] // we don't need std, and we can't have std, since it doesn't exist @@ -76,7 +75,7 @@ #![allow(non_upper_case_globals)] #![allow(missing_docs)] #![allow(non_snake_case)] -#![allow(raw_pointer_deriving)] +#![allow(raw_pointer_derive)] extern crate core; @@ -5081,5 +5080,7 @@ pub fn issue_14344_workaround() {} // FIXME #14344 force linkage to happen corre #[doc(hidden)] #[cfg(not(test))] mod std { - pub use core::kinds; + #[cfg(stage0)] + pub use core::marker as kinds; + pub use core::marker; } diff --git a/src/liblog/directive.rs b/src/liblog/directive.rs index 8134503019c99..d741019aa7b9c 100644 --- a/src/liblog/directive.rs +++ b/src/liblog/directive.rs @@ -84,7 +84,7 @@ pub fn parse_logging_spec(spec: &str) -> (Vec, Option) { match Regex::new(filter) { Ok(re) => Some(re), Err(e) => { - println!("warning: invalid regex filter - {}", e); + println!("warning: invalid regex filter - {:?}", e); None } } diff --git a/src/liblog/lib.rs b/src/liblog/lib.rs index df85e89efd17c..08b01e956e1ac 100644 --- a/src/liblog/lib.rs +++ b/src/liblog/lib.rs @@ -16,12 +16,12 @@ //! #[macro_use] extern crate log; //! //! fn main() { -//! debug!("this is a debug {}", "message"); +//! debug!("this is a debug {:?}", "message"); //! error!("this is printed by default"); //! //! if log_enabled!(log::INFO) { //! let x = 3i * 4i; // expensive computation -//! info!("the answer was: {}", x); +//! info!("the answer was: {:?}", x); //! } //! } //! ``` @@ -163,7 +163,7 @@ html_favicon_url = "http://www.rust-lang.org/favicon.ico", html_root_url = "http://doc.rust-lang.org/nightly/", html_playground_url = "http://play.rust-lang.org/")] -#![feature(macro_rules, unboxed_closures, slicing_syntax)] +#![feature(slicing_syntax)] #![deny(missing_docs)] extern crate regex; @@ -182,8 +182,7 @@ use regex::Regex; use directive::LOG_LEVEL_NAMES; -#[cfg_attr(stage0, macro_escape)] -#[cfg_attr(not(stage0), macro_use)] +#[macro_use] pub mod macros; mod directive; @@ -238,11 +237,17 @@ struct DefaultLogger { pub struct LogLevel(pub u32); impl fmt::Show for LogLevel { + fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { + fmt::String::fmt(self, fmt) + } +} + +impl fmt::String for LogLevel { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { let LogLevel(level) = *self; match LOG_LEVEL_NAMES.get(level as uint - 1) { - Some(name) => name.fmt(fmt), - None => level.fmt(fmt) + Some(ref name) => fmt::String::fmt(name, fmt), + None => fmt::String::fmt(&level, fmt) } } } @@ -254,7 +259,7 @@ impl Logger for DefaultLogger { record.level, record.module_path, record.args) { - Err(e) => panic!("failed to log: {}", e), + Err(e) => panic!("failed to log: {:?}", e), Ok(()) => {} } } @@ -264,7 +269,7 @@ impl Drop for DefaultLogger { fn drop(&mut self) { // FIXME(#12628): is panicking the right thing to do? match self.handle.flush() { - Err(e) => panic!("failed to flush a logger: {}", e), + Err(e) => panic!("failed to flush a logger: {:?}", e), Ok(()) => {} } } @@ -282,7 +287,7 @@ pub fn log(level: u32, loc: &'static LogLocation, args: fmt::Arguments) { // Test the literal string from args against the current filter, if there // is one. 
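
The `LogLevel` change above is the canonical shape of the `Show`/`String` split for types whose debug and display output should coincide: the real formatting logic lives in `fmt::String`, and `fmt::Show` simply forwards to it so `{:?}` keeps working. A minimal sketch of that pattern on an illustrative newtype:

    use std::fmt;

    struct Level(u32);

    impl fmt::String for Level {
        fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
            let Level(n) = *self;
            write!(f, "level {}", n)
        }
    }

    // `{:?}` delegates to the `{}` implementation, as LogLevel does above.
    impl fmt::Show for Level {
        fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
            fmt::String::fmt(self, f)
        }
    }

    fn main() {
        assert_eq!(format!("{}", Level(3)), "level 3");
        assert_eq!(format!("{:?}", Level(3)), "level 3");
    }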
match unsafe { FILTER.as_ref() } { - Some(filter) if !filter.is_match(args.to_string()[]) => return, + Some(filter) if !filter.is_match(args.to_string().index(&FullRange)) => return, _ => {} } @@ -377,7 +382,7 @@ fn enabled(level: u32, // Search for the longest match, the vector is assumed to be pre-sorted. for directive in iter.rev() { match directive.name { - Some(ref name) if !module.starts_with(name[]) => {}, + Some(ref name) if !module.starts_with(name.index(&FullRange)) => {}, Some(..) | None => { return level <= directive.level } @@ -392,7 +397,7 @@ fn enabled(level: u32, /// `Once` primitive (and this function is called from that primitive). fn init() { let (mut directives, filter) = match os::getenv("RUST_LOG") { - Some(spec) => directive::parse_logging_spec(spec[]), + Some(spec) => directive::parse_logging_spec(spec.index(&FullRange)), None => (Vec::new(), None), }; diff --git a/src/librand/distributions/range.rs b/src/librand/distributions/range.rs index 1038009522d67..1002d9693ba69 100644 --- a/src/librand/distributions/range.rs +++ b/src/librand/distributions/range.rs @@ -12,7 +12,7 @@ // this is surprisingly complicated to be both generic & correct -use core::prelude::*; +use core::prelude::{PartialOrd}; use core::num::Int; use Rng; @@ -166,7 +166,7 @@ mod tests { use std::num::Int; use std::prelude::v1::*; use distributions::{Sample, IndependentSample}; - use super::Range; + use super::Range as Range; #[should_fail] #[test] diff --git a/src/librand/lib.rs b/src/librand/lib.rs index c4dd08f9917e2..ad2a4dbec4e92 100644 --- a/src/librand/lib.rs +++ b/src/librand/lib.rs @@ -23,35 +23,14 @@ html_root_url = "http://doc.rust-lang.org/nightly/", html_playground_url = "http://play.rust-lang.org/")] -#![feature(macro_rules, phase, globs)] -#![feature(unboxed_closures)] -#![feature(associated_types)] #![no_std] #![experimental] -#[cfg(stage0)] -#[phase(plugin, link)] -extern crate core; - -#[cfg(not(stage0))] #[macro_use] extern crate core; -#[cfg(all(test, stage0))] -#[phase(plugin, link)] -extern crate std; - -#[cfg(all(test, not(stage0)))] -#[macro_use] -extern crate std; - -#[cfg(all(test, stage0))] -#[phase(plugin, link)] -extern crate log; - -#[cfg(all(test, not(stage0)))] -#[macro_use] -extern crate log; +#[cfg(test)] #[macro_use] extern crate std; +#[cfg(test)] #[macro_use] extern crate log; use core::prelude::*; @@ -161,7 +140,7 @@ pub trait Rng : Sized { /// /// let mut v = [0u8; 13579]; /// thread_rng().fill_bytes(&mut v); - /// println!("{}", v.as_slice()); + /// println!("{:?}", v.as_slice()); /// ``` fn fill_bytes(&mut self, dest: &mut [u8]) { // this could, in theory, be done by transmuting dest to a @@ -197,7 +176,7 @@ pub trait Rng : Sized { /// let mut rng = thread_rng(); /// let x: uint = rng.gen(); /// println!("{}", x); - /// println!("{}", rng.gen::<(f64, bool)>()); + /// println!("{:?}", rng.gen::<(f64, bool)>()); /// ``` #[inline(always)] fn gen(&mut self) -> T { @@ -215,8 +194,8 @@ pub trait Rng : Sized { /// let mut rng = thread_rng(); /// let x = rng.gen_iter::().take(10).collect::>(); /// println!("{}", x); - /// println!("{}", rng.gen_iter::<(f64, bool)>().take(5) - /// .collect::>()); + /// println!("{:?}", rng.gen_iter::<(f64, bool)>().take(5) + /// .collect::>()); /// ``` fn gen_iter<'a, T: Rand>(&'a mut self) -> Generator<'a, T, Self> { Generator { rng: self } @@ -289,9 +268,9 @@ pub trait Rng : Sized { /// /// let choices = [1i, 2, 4, 8, 16, 32]; /// let mut rng = thread_rng(); - /// println!("{}", rng.choose(&choices)); - /// # // replace with slicing 
syntax when it's stable! - /// assert_eq!(rng.choose(choices.slice_to(0)), None); + /// println!("{:?}", rng.choose(&choices)); + /// # // uncomment when slicing syntax is stable + /// //assert_eq!(rng.choose(choices.index(&(0..0))), None); /// ``` fn choose<'a, T>(&mut self, values: &'a [T]) -> Option<&'a T> { if values.is_empty() { @@ -516,7 +495,9 @@ pub struct Closed01(pub F); mod std { pub use core::{option, fmt}; // panic!() pub use core::clone; // derive Clone - pub use core::kinds; + #[cfg(stage0)] + pub use core::marker as kinds; + pub use core::marker; } #[cfg(test)] diff --git a/src/librbml/io.rs b/src/librbml/io.rs index de06471e65ed9..5ebec32d73384 100644 --- a/src/librbml/io.rs +++ b/src/librbml/io.rs @@ -95,7 +95,7 @@ impl Writer for SeekableMemWriter { // there (left), and what will be appended on the end (right) let cap = self.buf.len() - self.pos; let (left, right) = if cap <= buf.len() { - (buf[..cap], buf[cap..]) + (buf.index(&(0..cap)), buf.index(&(cap..))) } else { let result: (_, &[_]) = (buf, &[]); result diff --git a/src/librbml/lib.rs b/src/librbml/lib.rs index e57542a6d14de..a66d1dd08c1eb 100644 --- a/src/librbml/lib.rs +++ b/src/librbml/lib.rs @@ -24,19 +24,10 @@ html_root_url = "http://doc.rust-lang.org/nightly/", html_playground_url = "http://play.rust-lang.org/")] #![allow(unknown_features)] -#![feature(macro_rules, phase, slicing_syntax, globs)] -#![feature(unboxed_closures, associated_types)] -#![allow(missing_docs)] +#![feature(slicing_syntax)] extern crate serialize; - -#[cfg(stage0)] -#[phase(plugin, link)] -extern crate log; - -#[cfg(not(stage0))] -#[macro_use] -extern crate log; +#[macro_use] extern crate log; #[cfg(test)] extern crate test; @@ -65,7 +56,7 @@ impl<'doc> Doc<'doc> { } pub fn as_str_slice<'a>(&'a self) -> &'a str { - str::from_utf8(self.data[self.start..self.end]).unwrap() + str::from_utf8(self.data.index(&(self.start..self.end))).unwrap() } pub fn as_str(&self) -> String { @@ -147,7 +138,7 @@ pub mod reader { match $e { Ok(e) => e, Err(e) => { - debug!("ignored error: {}", e); + debug!("ignored error: {:?}", e); return $r } } @@ -256,7 +247,7 @@ pub mod reader { match maybe_get_doc(d, tg) { Some(d) => d, None => { - error!("failed to find block with tag {}", tg); + error!("failed to find block with tag {:?}", tg); panic!(); } } @@ -300,7 +291,7 @@ pub mod reader { pub fn with_doc_data(d: Doc, f: F) -> T where F: FnOnce(&[u8]) -> T, { - f(d.data[d.start..d.end]) + f(d.data.index(&(d.start..d.end))) } @@ -351,8 +342,8 @@ pub mod reader { self.pos = r_doc.end; let str = r_doc.as_str_slice(); if lbl != str { - return Err(Expected(format!("Expected label {} but \ - found {}", lbl, str))); + return Err(Expected(format!("Expected label {:?} but \ + found {:?}", lbl, str))); } } } @@ -360,14 +351,14 @@ pub mod reader { } fn next_doc(&mut self, exp_tag: EbmlEncoderTag) -> DecodeResult> { - debug!(". next_doc(exp_tag={})", exp_tag); + debug!(". 
next_doc(exp_tag={:?})", exp_tag); if self.pos >= self.parent.end { return Err(Expected(format!("no more documents in \ current node!"))); } let TaggedDoc { tag: r_tag, doc: r_doc } = try!(doc_at(self.parent.data, self.pos)); - debug!("self.parent={}-{} self.pos={} r_tag={} r_doc={}-{}", + debug!("self.parent={:?}-{:?} self.pos={:?} r_tag={:?} r_doc={:?}-{:?}", self.parent.start, self.parent.end, self.pos, @@ -375,8 +366,8 @@ pub mod reader { r_doc.start, r_doc.end); if r_tag != (exp_tag as uint) { - return Err(Expected(format!("expected EBML doc with tag {} but \ - found tag {}", exp_tag, r_tag))); + return Err(Expected(format!("expected EBML doc with tag {:?} but \ + found tag {:?}", exp_tag, r_tag))); } if r_doc.end > self.parent.end { return Err(Expected(format!("invalid EBML, child extends to \ @@ -403,7 +394,7 @@ pub mod reader { fn _next_uint(&mut self, exp_tag: EbmlEncoderTag) -> DecodeResult { let r = doc_as_u32(try!(self.next_doc(exp_tag))); - debug!("_next_uint exp_tag={} result={}", exp_tag, r); + debug!("_next_uint exp_tag={:?} result={:?}", exp_tag, r); Ok(r as uint) } @@ -424,263 +415,6 @@ pub mod reader { } } - #[cfg(stage0)] - impl<'doc> serialize::Decoder for Decoder<'doc> { - fn read_nil(&mut self) -> DecodeResult<()> { Ok(()) } - - fn read_u64(&mut self) -> DecodeResult { Ok(doc_as_u64(try!(self.next_doc(EsU64)))) } - fn read_u32(&mut self) -> DecodeResult { Ok(doc_as_u32(try!(self.next_doc(EsU32)))) } - fn read_u16(&mut self) -> DecodeResult { Ok(doc_as_u16(try!(self.next_doc(EsU16)))) } - fn read_u8 (&mut self) -> DecodeResult { Ok(doc_as_u8 (try!(self.next_doc(EsU8 )))) } - fn read_uint(&mut self) -> DecodeResult { - let v = doc_as_u64(try!(self.next_doc(EsUint))); - if v > (::std::uint::MAX as u64) { - Err(IntTooBig(v as uint)) - } else { - Ok(v as uint) - } - } - - fn read_i64(&mut self) -> DecodeResult { - Ok(doc_as_u64(try!(self.next_doc(EsI64))) as i64) - } - fn read_i32(&mut self) -> DecodeResult { - Ok(doc_as_u32(try!(self.next_doc(EsI32))) as i32) - } - fn read_i16(&mut self) -> DecodeResult { - Ok(doc_as_u16(try!(self.next_doc(EsI16))) as i16) - } - fn read_i8 (&mut self) -> DecodeResult { - Ok(doc_as_u8(try!(self.next_doc(EsI8 ))) as i8) - } - fn read_int(&mut self) -> DecodeResult { - let v = doc_as_u64(try!(self.next_doc(EsInt))) as i64; - if v > (int::MAX as i64) || v < (int::MIN as i64) { - debug!("FIXME \\#6122: Removing this makes this function miscompile"); - Err(IntTooBig(v as uint)) - } else { - Ok(v as int) - } - } - - fn read_bool(&mut self) -> DecodeResult { - Ok(doc_as_u8(try!(self.next_doc(EsBool))) != 0) - } - - fn read_f64(&mut self) -> DecodeResult { - let bits = doc_as_u64(try!(self.next_doc(EsF64))); - Ok(unsafe { transmute(bits) }) - } - fn read_f32(&mut self) -> DecodeResult { - let bits = doc_as_u32(try!(self.next_doc(EsF32))); - Ok(unsafe { transmute(bits) }) - } - fn read_char(&mut self) -> DecodeResult { - Ok(char::from_u32(doc_as_u32(try!(self.next_doc(EsChar)))).unwrap()) - } - fn read_str(&mut self) -> DecodeResult { - Ok(try!(self.next_doc(EsStr)).as_str()) - } - - // Compound types: - fn read_enum(&mut self, name: &str, f: F) -> DecodeResult where - F: FnOnce(&mut Decoder<'doc>) -> DecodeResult, - { - debug!("read_enum({})", name); - try!(self._check_label(name)); - - let doc = try!(self.next_doc(EsEnum)); - - let (old_parent, old_pos) = (self.parent, self.pos); - self.parent = doc; - self.pos = self.parent.start; - - let result = try!(f(self)); - - self.parent = old_parent; - self.pos = old_pos; - Ok(result) - } - - fn 
read_enum_variant(&mut self, _: &[&str], - mut f: F) -> DecodeResult - where F: FnMut(&mut Decoder<'doc>, uint) -> DecodeResult, - { - debug!("read_enum_variant()"); - let idx = try!(self._next_uint(EsEnumVid)); - debug!(" idx={}", idx); - - let doc = try!(self.next_doc(EsEnumBody)); - - let (old_parent, old_pos) = (self.parent, self.pos); - self.parent = doc; - self.pos = self.parent.start; - - let result = try!(f(self, idx)); - - self.parent = old_parent; - self.pos = old_pos; - Ok(result) - } - - fn read_enum_variant_arg(&mut self, idx: uint, f: F) -> DecodeResult where - F: FnOnce(&mut Decoder<'doc>) -> DecodeResult, - { - debug!("read_enum_variant_arg(idx={})", idx); - f(self) - } - - fn read_enum_struct_variant(&mut self, _: &[&str], - mut f: F) -> DecodeResult - where F: FnMut(&mut Decoder<'doc>, uint) -> DecodeResult, - { - debug!("read_enum_struct_variant()"); - let idx = try!(self._next_uint(EsEnumVid)); - debug!(" idx={}", idx); - - let doc = try!(self.next_doc(EsEnumBody)); - - let (old_parent, old_pos) = (self.parent, self.pos); - self.parent = doc; - self.pos = self.parent.start; - - let result = try!(f(self, idx)); - - self.parent = old_parent; - self.pos = old_pos; - Ok(result) - } - - fn read_enum_struct_variant_field(&mut self, - name: &str, - idx: uint, - f: F) - -> DecodeResult where - F: FnOnce(&mut Decoder<'doc>) -> DecodeResult, - { - debug!("read_enum_struct_variant_arg(name={}, idx={})", name, idx); - f(self) - } - - fn read_struct(&mut self, name: &str, _: uint, f: F) -> DecodeResult where - F: FnOnce(&mut Decoder<'doc>) -> DecodeResult, - { - debug!("read_struct(name={})", name); - f(self) - } - - fn read_struct_field(&mut self, name: &str, idx: uint, f: F) -> DecodeResult where - F: FnOnce(&mut Decoder<'doc>) -> DecodeResult, - { - debug!("read_struct_field(name={}, idx={})", name, idx); - try!(self._check_label(name)); - f(self) - } - - fn read_tuple(&mut self, tuple_len: uint, f: F) -> DecodeResult where - F: FnOnce(&mut Decoder<'doc>) -> DecodeResult, - { - debug!("read_tuple()"); - self.read_seq(move |d, len| { - if len == tuple_len { - f(d) - } else { - Err(Expected(format!("Expected tuple of length `{}`, \ - found tuple of length `{}`", tuple_len, len))) - } - }) - } - - fn read_tuple_arg(&mut self, idx: uint, f: F) -> DecodeResult where - F: FnOnce(&mut Decoder<'doc>) -> DecodeResult, - { - debug!("read_tuple_arg(idx={})", idx); - self.read_seq_elt(idx, f) - } - - fn read_tuple_struct(&mut self, name: &str, len: uint, f: F) -> DecodeResult where - F: FnOnce(&mut Decoder<'doc>) -> DecodeResult, - { - debug!("read_tuple_struct(name={})", name); - self.read_tuple(len, f) - } - - fn read_tuple_struct_arg(&mut self, - idx: uint, - f: F) - -> DecodeResult where - F: FnOnce(&mut Decoder<'doc>) -> DecodeResult, - { - debug!("read_tuple_struct_arg(idx={})", idx); - self.read_tuple_arg(idx, f) - } - - fn read_option(&mut self, mut f: F) -> DecodeResult where - F: FnMut(&mut Decoder<'doc>, bool) -> DecodeResult, - { - debug!("read_option()"); - self.read_enum("Option", move |this| { - this.read_enum_variant(&["None", "Some"], move |this, idx| { - match idx { - 0 => f(this, false), - 1 => f(this, true), - _ => { - Err(Expected(format!("Expected None or Some"))) - } - } - }) - }) - } - - fn read_seq(&mut self, f: F) -> DecodeResult where - F: FnOnce(&mut Decoder<'doc>, uint) -> DecodeResult, - { - debug!("read_seq()"); - self.push_doc(EsVec, move |d| { - let len = try!(d._next_uint(EsVecLen)); - debug!(" len={}", len); - f(d, len) - }) - } - - fn read_seq_elt(&mut 
self, idx: uint, f: F) -> DecodeResult where - F: FnOnce(&mut Decoder<'doc>) -> DecodeResult, - { - debug!("read_seq_elt(idx={})", idx); - self.push_doc(EsVecElt, f) - } - - fn read_map(&mut self, f: F) -> DecodeResult where - F: FnOnce(&mut Decoder<'doc>, uint) -> DecodeResult, - { - debug!("read_map()"); - self.push_doc(EsMap, move |d| { - let len = try!(d._next_uint(EsMapLen)); - debug!(" len={}", len); - f(d, len) - }) - } - - fn read_map_elt_key(&mut self, idx: uint, f: F) -> DecodeResult where - F: FnOnce(&mut Decoder<'doc>) -> DecodeResult, - { - debug!("read_map_elt_key(idx={})", idx); - self.push_doc(EsMapKey, f) - } - - fn read_map_elt_val(&mut self, idx: uint, f: F) -> DecodeResult where - F: FnOnce(&mut Decoder<'doc>) -> DecodeResult, - { - debug!("read_map_elt_val(idx={})", idx); - self.push_doc(EsMapVal, f) - } - - fn error(&mut self, err: &str) -> Error { - ApplicationError(err.to_string()) - } - } - - #[cfg(not(stage0))] impl<'doc> serialize::Decoder for Decoder<'doc> { type Error = Error; fn read_nil(&mut self) -> DecodeResult<()> { Ok(()) } @@ -1007,7 +741,7 @@ pub mod writer { } pub fn start_tag(&mut self, tag_id: uint) -> EncodeResult { - debug!("Start tag {}", tag_id); + debug!("Start tag {:?}", tag_id); // Write the enum ID: try!(write_vuint(self.writer, tag_id)); @@ -1026,7 +760,7 @@ pub mod writer { try!(write_sized_vuint(self.writer, size, 4u)); let r = try!(self.writer.seek(cur_pos as i64, io::SeekSet)); - debug!("End tag (size = {})", size); + debug!("End tag (size = {:?})", size); Ok(r) } @@ -1093,12 +827,12 @@ pub mod writer { } pub fn wr_bytes(&mut self, b: &[u8]) -> EncodeResult { - debug!("Write {} bytes", b.len()); + debug!("Write {:?} bytes", b.len()); self.writer.write(b) } pub fn wr_str(&mut self, s: &str) -> EncodeResult { - debug!("Write str: {}", s); + debug!("Write str: {:?}", s); self.writer.write(s.as_bytes()) } } @@ -1137,209 +871,6 @@ pub mod writer { } } - #[cfg(stage0)] - impl<'a, W: Writer + Seek> serialize::Encoder for Encoder<'a, W> { - - fn emit_nil(&mut self) -> EncodeResult { - Ok(()) - } - - fn emit_uint(&mut self, v: uint) -> EncodeResult { - self.wr_tagged_u64(EsUint as uint, v as u64) - } - fn emit_u64(&mut self, v: u64) -> EncodeResult { - self.wr_tagged_u64(EsU64 as uint, v) - } - fn emit_u32(&mut self, v: u32) -> EncodeResult { - self.wr_tagged_u32(EsU32 as uint, v) - } - fn emit_u16(&mut self, v: u16) -> EncodeResult { - self.wr_tagged_u16(EsU16 as uint, v) - } - fn emit_u8(&mut self, v: u8) -> EncodeResult { - self.wr_tagged_u8(EsU8 as uint, v) - } - - fn emit_int(&mut self, v: int) -> EncodeResult { - self.wr_tagged_i64(EsInt as uint, v as i64) - } - fn emit_i64(&mut self, v: i64) -> EncodeResult { - self.wr_tagged_i64(EsI64 as uint, v) - } - fn emit_i32(&mut self, v: i32) -> EncodeResult { - self.wr_tagged_i32(EsI32 as uint, v) - } - fn emit_i16(&mut self, v: i16) -> EncodeResult { - self.wr_tagged_i16(EsI16 as uint, v) - } - fn emit_i8(&mut self, v: i8) -> EncodeResult { - self.wr_tagged_i8(EsI8 as uint, v) - } - - fn emit_bool(&mut self, v: bool) -> EncodeResult { - self.wr_tagged_u8(EsBool as uint, v as u8) - } - - fn emit_f64(&mut self, v: f64) -> EncodeResult { - let bits = unsafe { mem::transmute(v) }; - self.wr_tagged_u64(EsF64 as uint, bits) - } - fn emit_f32(&mut self, v: f32) -> EncodeResult { - let bits = unsafe { mem::transmute(v) }; - self.wr_tagged_u32(EsF32 as uint, bits) - } - fn emit_char(&mut self, v: char) -> EncodeResult { - self.wr_tagged_u32(EsChar as uint, v as u32) - } - - fn emit_str(&mut self, v: &str) 
-> EncodeResult { - self.wr_tagged_str(EsStr as uint, v) - } - - fn emit_enum(&mut self, name: &str, f: F) -> EncodeResult where - F: FnOnce(&mut Encoder<'a, W>) -> EncodeResult, - { - try!(self._emit_label(name)); - try!(self.start_tag(EsEnum as uint)); - try!(f(self)); - self.end_tag() - } - - fn emit_enum_variant(&mut self, - _: &str, - v_id: uint, - _: uint, - f: F) -> EncodeResult where - F: FnOnce(&mut Encoder<'a, W>) -> EncodeResult, - { - try!(self._emit_tagged_uint(EsEnumVid, v_id)); - try!(self.start_tag(EsEnumBody as uint)); - try!(f(self)); - self.end_tag() - } - - fn emit_enum_variant_arg(&mut self, _: uint, f: F) -> EncodeResult where - F: FnOnce(&mut Encoder<'a, W>) -> EncodeResult, - { - f(self) - } - - fn emit_enum_struct_variant(&mut self, - v_name: &str, - v_id: uint, - cnt: uint, - f: F) -> EncodeResult where - F: FnOnce(&mut Encoder<'a, W>) -> EncodeResult, - { - self.emit_enum_variant(v_name, v_id, cnt, f) - } - - fn emit_enum_struct_variant_field(&mut self, - _: &str, - idx: uint, - f: F) -> EncodeResult where - F: FnOnce(&mut Encoder<'a, W>) -> EncodeResult, - { - self.emit_enum_variant_arg(idx, f) - } - - fn emit_struct(&mut self, _: &str, _len: uint, f: F) -> EncodeResult where - F: FnOnce(&mut Encoder<'a, W>) -> EncodeResult, - { - f(self) - } - - fn emit_struct_field(&mut self, name: &str, _: uint, f: F) -> EncodeResult where - F: FnOnce(&mut Encoder<'a, W>) -> EncodeResult, - { - try!(self._emit_label(name)); - f(self) - } - - fn emit_tuple(&mut self, len: uint, f: F) -> EncodeResult where - F: FnOnce(&mut Encoder<'a, W>) -> EncodeResult, - { - self.emit_seq(len, f) - } - fn emit_tuple_arg(&mut self, idx: uint, f: F) -> EncodeResult where - F: FnOnce(&mut Encoder<'a, W>) -> EncodeResult, - { - self.emit_seq_elt(idx, f) - } - - fn emit_tuple_struct(&mut self, _: &str, len: uint, f: F) -> EncodeResult where - F: FnOnce(&mut Encoder<'a, W>) -> EncodeResult, - { - self.emit_seq(len, f) - } - fn emit_tuple_struct_arg(&mut self, idx: uint, f: F) -> EncodeResult where - F: FnOnce(&mut Encoder<'a, W>) -> EncodeResult, - { - self.emit_seq_elt(idx, f) - } - - fn emit_option(&mut self, f: F) -> EncodeResult where - F: FnOnce(&mut Encoder<'a, W>) -> EncodeResult, - { - self.emit_enum("Option", f) - } - fn emit_option_none(&mut self) -> EncodeResult { - self.emit_enum_variant("None", 0, 0, |_| Ok(())) - } - fn emit_option_some(&mut self, f: F) -> EncodeResult where - F: FnOnce(&mut Encoder<'a, W>) -> EncodeResult, - { - - self.emit_enum_variant("Some", 1, 1, f) - } - - fn emit_seq(&mut self, len: uint, f: F) -> EncodeResult where - F: FnOnce(&mut Encoder<'a, W>) -> EncodeResult, - { - - try!(self.start_tag(EsVec as uint)); - try!(self._emit_tagged_uint(EsVecLen, len)); - try!(f(self)); - self.end_tag() - } - - fn emit_seq_elt(&mut self, _idx: uint, f: F) -> EncodeResult where - F: FnOnce(&mut Encoder<'a, W>) -> EncodeResult, - { - - try!(self.start_tag(EsVecElt as uint)); - try!(f(self)); - self.end_tag() - } - - fn emit_map(&mut self, len: uint, f: F) -> EncodeResult where - F: FnOnce(&mut Encoder<'a, W>) -> EncodeResult, - { - - try!(self.start_tag(EsMap as uint)); - try!(self._emit_tagged_uint(EsMapLen, len)); - try!(f(self)); - self.end_tag() - } - - fn emit_map_elt_key(&mut self, _idx: uint, mut f: F) -> EncodeResult where - F: FnMut(&mut Encoder<'a, W>) -> EncodeResult, - { - - try!(self.start_tag(EsMapKey as uint)); - try!(f(self)); - self.end_tag() - } - - fn emit_map_elt_val(&mut self, _idx: uint, f: F) -> EncodeResult where - F: FnOnce(&mut Encoder<'a, W>) -> 
EncodeResult, - { - try!(self.start_tag(EsMapVal as uint)); - try!(f(self)); - self.end_tag() - } - } - #[cfg(not(stage0))] impl<'a, W: Writer + Seek> serialize::Encoder for Encoder<'a, W> { type Error = io::IoError; @@ -1608,7 +1139,7 @@ mod tests { #[test] fn test_option_int() { fn test_v(v: Option) { - debug!("v == {}", v); + debug!("v == {:?}", v); let mut wr = SeekableMemWriter::new(); { let mut rbml_w = writer::Encoder::new(&mut wr); @@ -1617,7 +1148,7 @@ mod tests { let rbml_doc = Doc::new(wr.get_ref()); let mut deser = reader::Decoder::new(rbml_doc); let v1 = Decodable::decode(&mut deser).unwrap(); - debug!("v1 == {}", v1); + debug!("v1 == {:?}", v1); assert_eq!(v, v1); } diff --git a/src/libregex/compile.rs b/src/libregex/compile.rs index d29a7a425c116..5803da1d3350d 100644 --- a/src/libregex/compile.rs +++ b/src/libregex/compile.rs @@ -105,7 +105,7 @@ impl Program { // This is a bit hacky since we have to skip over the initial // 'Save' instruction. let mut pre = String::with_capacity(5); - for inst in c.insts[1..].iter() { + for inst in c.insts.index(&(1..)).iter() { match *inst { OneChar(c, FLAG_EMPTY) => pre.push(c), _ => break diff --git a/src/libregex/lib.rs b/src/libregex/lib.rs index 0084be49b5619..c039abc9aff2a 100644 --- a/src/libregex/lib.rs +++ b/src/libregex/lib.rs @@ -13,7 +13,6 @@ //! Regular expressions implemented in Rust //! //! For official documentation, see the rust-lang/regex crate - #![crate_name = "regex"] #![crate_type = "rlib"] #![crate_type = "dylib"] @@ -24,9 +23,7 @@ html_playground_url = "http://play.rust-lang.org/")] #![allow(unknown_features)] -#![feature(macro_rules, slicing_syntax, globs)] -#![feature(unboxed_closures)] -#![feature(associated_types)] +#![feature(slicing_syntax)] #![deny(missing_docs)] #[cfg(test)] diff --git a/src/libregex/parse.rs b/src/libregex/parse.rs index 07da86afcc971..dd11d42b8aad6 100644 --- a/src/libregex/parse.rs +++ b/src/libregex/parse.rs @@ -18,6 +18,7 @@ use std::cmp; use std::fmt; use std::iter; use std::num; +use std::ops::Index; /// Static data containing Unicode ranges for general categories and scripts. 
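
The large deletions above exist because `serialize::Encoder` and `serialize::Decoder` gained an associated `Error` type; once every build stage understands associated types, the duplicated `#[cfg(stage0)]` impls written against the older form of the trait can be dropped and only the `type Error = ...;` version kept. A stripped-down sketch of that shape, with a made-up trait standing in for `serialize::Encoder`:

    // Illustrative stand-in for the Encoder shape after the change.
    trait SimpleEncoder {
        type Error;
        fn emit_uint(&mut self, v: uint) -> Result<(), Self::Error>;
    }

    struct VecEncoder { out: Vec<u8> }

    impl SimpleEncoder for VecEncoder {
        // Previously an error type parameter on the trait; now an associated
        // type chosen by the impl (io::IoError in librbml's case).
        type Error = ();
        fn emit_uint(&mut self, v: uint) -> Result<(), ()> {
            self.out.push(v as u8);
            Ok(())
        }
    }

    fn main() {
        let mut e = VecEncoder { out: Vec::new() };
        e.emit_uint(7).unwrap();
        assert_eq!(e.out.len(), 1);
    }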
use unicode::regex::{UNICODE_CLASSES, PERLD, PERLS, PERLW}; @@ -39,7 +40,7 @@ pub struct Error { impl fmt::Show for Error { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "Regex syntax error near position {}: {}", + write!(f, "Regex syntax error near position {}: {:?}", self.pos, self.msg) } } @@ -121,7 +122,7 @@ impl BuildAst { fn flags(&self) -> Flags { match *self { Paren(flags, _, _) => flags, - _ => panic!("Cannot get flags from {}", self), + _ => panic!("Cannot get flags from {:?}", self), } } @@ -129,7 +130,7 @@ impl BuildAst { match *self { Paren(_, 0, _) => None, Paren(_, c, _) => Some(c), - _ => panic!("Cannot get capture group from {}", self), + _ => panic!("Cannot get capture group from {:?}", self), } } @@ -143,7 +144,7 @@ impl BuildAst { Some(name.clone()) } } - _ => panic!("Cannot get capture name from {}", self), + _ => panic!("Cannot get capture name from {:?}", self), } } @@ -157,7 +158,7 @@ impl BuildAst { fn unwrap(self) -> Result { match self { Expr(x) => Ok(x), - _ => panic!("Tried to unwrap non-AST item: {}", self), + _ => panic!("Tried to unwrap non-AST item: {:?}", self), } } } @@ -284,8 +285,8 @@ impl<'a> Parser<'a> { match self.next_char() { true => Ok(()), false => { - self.err(format!("Expected {} but got EOF.", - expected)[]) + self.err(format!("Expected {:?} but got EOF.", + expected).index(&FullRange)) } } } @@ -293,11 +294,11 @@ impl<'a> Parser<'a> { fn expect(&mut self, expected: char) -> Result<(), Error> { match self.next_char() { true if self.cur() == expected => Ok(()), - true => self.err(format!("Expected '{}' but got '{}'.", - expected, self.cur())[]), + true => self.err(format!("Expected '{:?}' but got '{:?}'.", + expected, self.cur()).index(&FullRange)), false => { - self.err(format!("Expected '{}' but got EOF.", - expected)[]) + self.err(format!("Expected '{:?}' but got EOF.", + expected).index(&FullRange)) } } } @@ -394,7 +395,7 @@ impl<'a> Parser<'a> { continue } Some(ast) => - panic!("Expected Class AST but got '{}'", ast), + panic!("Expected Class AST but got '{:?}'", ast), // Just drop down and try to add as a regular character. 
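
The parser's `panic!`s and error strings above switch to `{:?}` because the interpolated values (AST nodes, parser state) only provide `Show`, not the user-facing `String`. With a derived `Show`, any internal enum can be dropped into such a message; a hedged sketch with a made-up enum (not the patch's `BuildAst`):

    #[derive(Show)]
    enum BuildStep {
        Literal(char),
        Group(uint),
    }

    fn main() {
        let steps = vec![BuildStep::Literal('a'), BuildStep::Group(2)];
        // `{:?}` only needs Show, which can be derived; `{}` would require a
        // hand-written fmt::String impl for BuildStep.
        let msg = format!("Unexpected AST items {:?}", steps);
        assert!(msg.contains("Group"));
    }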
None => {}, }, @@ -409,7 +410,7 @@ impl<'a> Parser<'a> { return self.err( "\\A, \\z, \\b and \\B are not valid escape \ sequences inside a character class."), - ast => panic!("Unexpected AST item '{}'", ast), + ast => panic!("Unexpected AST item '{:?}'", ast), } } ']' if ranges.len() > 0 || alts.len() > 0 => { @@ -442,15 +443,15 @@ impl<'a> Parser<'a> { match try!(self.parse_escape()) { Literal(c3, _) => c2 = c3, // allow literal escapes below ast => - return self.err(format!("Expected a literal, but got {}.", - ast)[]), + return self.err(format!("Expected a literal, but got {:?}.", + ast).index(&FullRange)), } } if c2 < c { return self.err(format!("Invalid character class \ range '{}-{}'", c, - c2)[]) + c2).index(&FullRange)) } ranges.push((c, self.cur())) } else { @@ -488,7 +489,7 @@ impl<'a> Parser<'a> { FLAG_EMPTY }; let name = self.slice(name_start, closer - 1); - match find_class(ASCII_CLASSES, name[]) { + match find_class(ASCII_CLASSES, name.index(&FullRange)) { None => None, Some(ranges) => { self.chari = closer; @@ -512,19 +513,19 @@ impl<'a> Parser<'a> { None => { return self.err(format!("No closing brace for counted \ repetition starting at position \ - {}.", - start)[]) + {:?}.", + start).index(&FullRange)) } }; self.chari = closer; let greed = try!(self.get_next_greedy()); - let inner = self.chars[start+1..closer].iter().cloned() + let inner = self.chars.index(&((start+1)..closer)).iter().cloned() .collect::(); // Parse the min and max values from the regex. let (mut min, mut max): (uint, Option); if !inner.contains(",") { - min = try!(self.parse_uint(inner[])); + min = try!(self.parse_uint(inner.index(&FullRange))); max = Some(min); } else { let pieces: Vec<&str> = inner.splitn(1, ',').collect(); @@ -546,19 +547,19 @@ impl<'a> Parser<'a> { if min > MAX_REPEAT { return self.err(format!( "{} exceeds maximum allowed repetitions ({})", - min, MAX_REPEAT)[]); + min, MAX_REPEAT).index(&FullRange)); } if max.is_some() { let m = max.unwrap(); if m > MAX_REPEAT { return self.err(format!( "{} exceeds maximum allowed repetitions ({})", - m, MAX_REPEAT)[]); + m, MAX_REPEAT).index(&FullRange)); } if m < min { return self.err(format!( "Max repetitions ({}) cannot be smaller than min \ - repetitions ({}).", m, min)[]); + repetitions ({}).", m, min).index(&FullRange)); } } @@ -622,7 +623,7 @@ impl<'a> Parser<'a> { Ok(AstClass(ranges, flags)) } _ => { - self.err(format!("Invalid escape sequence '\\\\{}'", c)[]) + self.err(format!("Invalid escape sequence '\\\\{}'", c).index(&FullRange)) } } } @@ -642,7 +643,7 @@ impl<'a> Parser<'a> { Some(i) => i, None => return self.err(format!( "Missing '}}' for unclosed '{{' at position {}", - self.chari)[]), + self.chari).index(&FullRange)), }; if closer - self.chari + 1 == 0 { return self.err("No Unicode class name found.") @@ -656,10 +657,10 @@ impl<'a> Parser<'a> { name = self.slice(self.chari + 1, self.chari + 2); self.chari += 1; } - match find_class(UNICODE_CLASSES, name[]) { + match find_class(UNICODE_CLASSES, name.index(&FullRange)) { None => { return self.err(format!("Could not find Unicode class '{}'", - name)[]) + name).index(&FullRange)) } Some(ranges) => { Ok(AstClass(ranges, negated | (self.flags & FLAG_NOCASE))) @@ -682,11 +683,11 @@ impl<'a> Parser<'a> { } } let s = self.slice(start, end); - match num::from_str_radix::(s[], 8) { + match num::from_str_radix::(s.index(&FullRange), 8) { Some(n) => Ok(Literal(try!(self.char_from_u32(n)), FLAG_EMPTY)), None => { - self.err(format!("Could not parse '{}' as octal number.", - s)[]) + 
self.err(format!("Could not parse '{:?}' as octal number.", + s).index(&FullRange)) } } } @@ -704,12 +705,12 @@ impl<'a> Parser<'a> { None => { return self.err(format!("Missing '}}' for unclosed \ '{{' at position {}", - start)[]) + start).index(&FullRange)) } Some(i) => i, }; self.chari = closer; - self.parse_hex_digits(self.slice(start, closer)[]) + self.parse_hex_digits(self.slice(start, closer).index(&FullRange)) } // Parses a two-digit hex number. @@ -729,7 +730,7 @@ impl<'a> Parser<'a> { match num::from_str_radix::(s, 16) { Some(n) => Ok(Literal(try!(self.char_from_u32(n)), FLAG_EMPTY)), None => { - self.err(format!("Could not parse '{}' as hex number.", s)[]) + self.err(format!("Could not parse '{}' as hex number.", s).index(&FullRange)) } } } @@ -755,7 +756,7 @@ impl<'a> Parser<'a> { } if self.names.contains(&name) { return self.err(format!("Duplicate capture group name '{}'.", - name)[]) + name).index(&FullRange)) } self.names.push(name.clone()); self.chari = closer; @@ -789,7 +790,7 @@ impl<'a> Parser<'a> { if sign < 0 { return self.err(format!( "Cannot negate flags twice in '{}'.", - self.slice(start, self.chari + 1))[]) + self.slice(start, self.chari + 1)).index(&FullRange)) } sign = -1; saw_flag = false; @@ -800,7 +801,7 @@ impl<'a> Parser<'a> { if !saw_flag { return self.err(format!( "A valid flag does not follow negation in '{}'", - self.slice(start, self.chari + 1))[]) + self.slice(start, self.chari + 1)).index(&FullRange)) } flags = flags ^ flags; } @@ -812,7 +813,7 @@ impl<'a> Parser<'a> { return Ok(()) } _ => return self.err(format!( - "Unrecognized flag '{}'.", self.cur())[]), + "Unrecognized flag '{}'.", self.cur()).index(&FullRange)), } } } @@ -910,7 +911,7 @@ impl<'a> Parser<'a> { Some(i) => Ok(i), None => { self.err(format!("Expected an unsigned integer but got '{}'.", - s)[]) + s).index(&FullRange)) } } } @@ -920,7 +921,7 @@ impl<'a> Parser<'a> { Some(c) => Ok(c), None => { self.err(format!("Could not decode '{}' to unicode \ - character.", n)[]) + character.", n).index(&FullRange)) } } } @@ -953,7 +954,7 @@ impl<'a> Parser<'a> { } fn slice(&self, start: uint, end: uint) -> String { - self.chars[start..end].iter().cloned().collect() + self.chars.index(&(start..end)).iter().cloned().collect() } } diff --git a/src/libregex/re.rs b/src/libregex/re.rs index 1840a3343e63c..37f9869f3bfaf 100644 --- a/src/libregex/re.rs +++ b/src/libregex/re.rs @@ -90,10 +90,19 @@ impl Clone for ExNative { } } +#[cfg(stage0)] +//FIXME: remove after stage0 snapshot impl fmt::Show for Regex { /// Shows the original regular expression. fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{}", self.as_str()) + fmt::String::fmt(self.as_str(), f) + } +} + +impl fmt::String for Regex { + /// Shows the original regular expression. + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::String::fmt(self.as_str(), f) } } @@ -238,19 +247,19 @@ impl Regex { } let (s, e) = cap.pos(0).unwrap(); // captures only reports matches - new.push_str(text[last_match..s]); - new.push_str(rep.reg_replace(&cap)[]); + new.push_str(text.index(&(last_match..s))); + new.push_str(rep.reg_replace(&cap).index(&FullRange)); last_match = e; } - new.push_str(text[last_match..text.len()]); + new.push_str(text.index(&(last_match..text.len()))); return new; } /// Returns the original string of this regex. pub fn as_str<'a>(&'a self) -> &'a str { match *self { - Dynamic(ExDynamic { ref original, .. }) => original[], - Native(ExNative { ref original, .. 
}) => original[], + Dynamic(ExDynamic { ref original, .. }) => original.index(&FullRange), + Native(ExNative { ref original, .. }) => original.index(&FullRange), } } @@ -347,13 +356,13 @@ impl<'r, 't> Iterator for RegexSplits<'r, 't> { if self.last >= text.len() { None } else { - let s = text[self.last..text.len()]; + let s = text.index(&(self.last..text.len())); self.last = text.len(); Some(s) } } Some((s, e)) => { - let matched = text[self.last..s]; + let matched = text.index(&(self.last..s)); self.last = e; Some(matched) } @@ -384,7 +393,7 @@ impl<'r, 't> Iterator for RegexSplitsN<'r, 't> { } else { self.cur += 1; if self.cur >= self.limit { - Some(text[self.splits.last..text.len()]) + Some(text.index(&(self.splits.last..text.len()))) } else { self.splits.next() } @@ -517,7 +526,7 @@ impl<'t> Captures<'t> { }) }); let re = Regex::new(r"\$\$").unwrap(); - re.replace_all(text[], NoExpand("$")) + re.replace_all(text.index(&FullRange), NoExpand("$")) } /// Returns the number of captured groups. diff --git a/src/libregex/test/mod.rs b/src/libregex/test/mod.rs index 48cc35aa5d93c..e11094b117471 100644 --- a/src/libregex/test/mod.rs +++ b/src/libregex/test/mod.rs @@ -12,7 +12,7 @@ macro_rules! regex { ($re:expr) => ( match ::regex::Regex::new($re) { Ok(re) => re, - Err(err) => panic!("{}", err), + Err(err) => panic!("{:?}", err), } ); } diff --git a/src/libregex/test/tests.rs b/src/libregex/test/tests.rs index d087814b7f401..b69420ac05bd1 100644 --- a/src/libregex/test/tests.rs +++ b/src/libregex/test/tests.rs @@ -159,10 +159,10 @@ macro_rules! mat { // actual capture groups to match test set. let mut sgot = got.as_slice(); if sgot.len() > expected.len() { - sgot = sgot[0..expected.len()] + sgot = &sgot[..expected.len()] } if expected != sgot { - panic!("For RE '{}' against '{}', expected '{}' but got '{}'", + panic!("For RE '{}' against '{}', expected '{:?}' but got '{:?}'", $re, text, expected, sgot); } } diff --git a/src/libregex/vm.rs b/src/libregex/vm.rs index 914167019d209..04c430da4d290 100644 --- a/src/libregex/vm.rs +++ b/src/libregex/vm.rs @@ -152,7 +152,7 @@ impl<'r, 't> Nfa<'r, 't> { // out early. 
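// ---- Illustrative sketch, not part of the patch: the `{}` vs `{:?}` churn and
// the new `impl fmt::String for Regex` above come from splitting the old
// `fmt::Show` trait into `fmt::String` (user-facing, `{}`) and a debug-only
// `fmt::Show` (`{:?}`); these traits later became `Display` and `Debug`. A
// minimal sketch using the modern names, with a hypothetical `Pattern` type:
use std::fmt;

struct Pattern(&'static str);

impl fmt::Display for Pattern {
    // `{}`: show the original pattern text, in the spirit of `Regex::as_str` above
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(self.0)
    }
}

impl fmt::Debug for Pattern {
    // `{:?}`: programmer-facing form
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "Pattern({})", self.0)
    }
}

fn main() {
    let p = Pattern(r"\d+");
    println!("{}", p);   // \d+
    println!("{:?}", p); // Pattern(\d+)
}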
if self.prog.prefix.len() > 0 && clist.size == 0 { let needle = self.prog.prefix.as_bytes(); - let haystack = self.input.as_bytes()[self.ic..]; + let haystack = self.input.as_bytes().index(&(self.ic..)); match find_prefix(needle, haystack) { None => break, Some(i) => { @@ -503,7 +503,8 @@ impl Threads { #[inline] fn groups<'r>(&'r mut self, i: uint) -> &'r mut [Option] { - self.queue[i].groups.as_mut_slice() + let q = &mut self.queue[i]; + q.groups.as_mut_slice() } } diff --git a/src/librustc/lib.rs b/src/librustc/lib.rs index 3ed712b15dfdc..a3a041c2497c7 100644 --- a/src/librustc/lib.rs +++ b/src/librustc/lib.rs @@ -23,12 +23,10 @@ html_root_url = "http://doc.rust-lang.org/nightly/")] #![allow(unknown_features)] -#![feature(default_type_params, globs, macro_rules, phase, quote)] +#![feature(quote)] #![feature(slicing_syntax, unsafe_destructor)] #![feature(rustc_diagnostic_macros)] -#![feature(unboxed_closures)] -#![feature(old_orphan_check)] -#![feature(associated_types)] +#![feature(old_impl_check)] extern crate arena; extern crate flate; @@ -40,22 +38,8 @@ extern crate rustc_back; extern crate serialize; extern crate rbml; extern crate collections; - -#[cfg(stage0)] -#[phase(plugin, link)] -extern crate log; - -#[cfg(not(stage0))] -#[macro_use] -extern crate log; - -#[cfg(stage0)] -#[phase(plugin, link)] -extern crate syntax; - -#[cfg(not(stage0))] -#[macro_use] -extern crate syntax; +#[macro_use] extern crate log; +#[macro_use] extern crate syntax; extern crate "serialize" as rustc_serialize; // used by deriving diff --git a/src/librustc/lint/builtin.rs b/src/librustc/lint/builtin.rs index 425e34cd9f042..1af8e2f29ebd7 100644 --- a/src/librustc/lint/builtin.rs +++ b/src/librustc/lint/builtin.rs @@ -46,7 +46,7 @@ use syntax::ast_util::is_shift_binop; use syntax::attr::{self, AttrMetaMethods}; use syntax::codemap::{Span, DUMMY_SP}; use syntax::parse::token; -use syntax::ast::{TyI, TyU, TyI8, TyU8, TyI16, TyU16, TyI32, TyU32, TyI64, TyU64}; +use syntax::ast::{TyIs, TyUs, TyI8, TyU8, TyI16, TyU16, TyI32, TyU32, TyI64, TyU64}; use syntax::ast_util; use syntax::ptr::P; use syntax::visit::{self, Visitor}; @@ -216,7 +216,7 @@ impl LintPass for TypeLimits { match lit.node { ast::LitInt(v, ast::SignedIntLit(_, ast::Plus)) | ast::LitInt(v, ast::UnsuffixedIntLit(ast::Plus)) => { - let int_type = if t == ast::TyI { + let int_type = if t == ast::TyIs { cx.sess().target.int_type } else { t }; let (min, max) = int_ty_range(int_type); @@ -233,7 +233,7 @@ impl LintPass for TypeLimits { }; }, ty::ty_uint(t) => { - let uint_type = if t == ast::TyU { + let uint_type = if t == ast::TyUs { cx.sess().target.uint_type } else { t }; let (min, max) = uint_ty_range(uint_type); @@ -296,7 +296,7 @@ impl LintPass for TypeLimits { // warnings are consistent between 32- and 64-bit platforms fn int_ty_range(int_ty: ast::IntTy) -> (i64, i64) { match int_ty { - ast::TyI => (i64::MIN, i64::MAX), + ast::TyIs=> (i64::MIN, i64::MAX), ast::TyI8 => (i8::MIN as i64, i8::MAX as i64), ast::TyI16 => (i16::MIN as i64, i16::MAX as i64), ast::TyI32 => (i32::MIN as i64, i32::MAX as i64), @@ -306,7 +306,7 @@ impl LintPass for TypeLimits { fn uint_ty_range(uint_ty: ast::UintTy) -> (u64, u64) { match uint_ty { - ast::TyU => (u64::MIN, u64::MAX), + ast::TyUs=> (u64::MIN, u64::MAX), ast::TyU8 => (u8::MIN as u64, u8::MAX as u64), ast::TyU16 => (u16::MIN as u64, u16::MAX as u64), ast::TyU32 => (u32::MIN as u64, u32::MAX as u64), @@ -323,7 +323,7 @@ impl LintPass for TypeLimits { fn int_ty_bits(int_ty: ast::IntTy, target_int_ty: ast::IntTy) -> 
u64 { match int_ty { - ast::TyI => int_ty_bits(target_int_ty, target_int_ty), + ast::TyIs=> int_ty_bits(target_int_ty, target_int_ty), ast::TyI8 => i8::BITS as u64, ast::TyI16 => i16::BITS as u64, ast::TyI32 => i32::BITS as u64, @@ -333,7 +333,7 @@ impl LintPass for TypeLimits { fn uint_ty_bits(uint_ty: ast::UintTy, target_uint_ty: ast::UintTy) -> u64 { match uint_ty { - ast::TyU => uint_ty_bits(target_uint_ty, target_uint_ty), + ast::TyUs=> uint_ty_bits(target_uint_ty, target_uint_ty), ast::TyU8 => u8::BITS as u64, ast::TyU16 => u16::BITS as u64, ast::TyU32 => u32::BITS as u64, @@ -404,14 +404,14 @@ struct ImproperCTypesVisitor<'a, 'tcx: 'a> { impl<'a, 'tcx> ImproperCTypesVisitor<'a, 'tcx> { fn check_def(&mut self, sp: Span, ty_id: ast::NodeId, path_id: ast::NodeId) { match self.cx.tcx.def_map.borrow()[path_id].clone() { - def::DefPrimTy(ast::TyInt(ast::TyI)) => { + def::DefPrimTy(ast::TyInt(ast::TyIs)) => { self.cx.span_lint(IMPROPER_CTYPES, sp, - "found rust type `int` in foreign module, while \ + "found rust type `isize` in foreign module, while \ libc::c_int or libc::c_long should be used"); } - def::DefPrimTy(ast::TyUint(ast::TyU)) => { + def::DefPrimTy(ast::TyUint(ast::TyUs)) => { self.cx.span_lint(IMPROPER_CTYPES, sp, - "found rust type `uint` in foreign module, while \ + "found rust type `usize` in foreign module, while \ libc::c_uint or libc::c_ulong should be used"); } def::DefTy(..) => { @@ -506,7 +506,7 @@ impl BoxPointers { if n_uniq > 0 { let s = ty_to_string(cx.tcx, ty); let m = format!("type uses owned (Box type) pointers: {}", s); - cx.span_lint(BOX_POINTERS, span, m[]); + cx.span_lint(BOX_POINTERS, span, m.index(&FullRange)); } } } @@ -586,7 +586,7 @@ impl LintPass for RawPointerDerive { } fn check_item(&mut self, cx: &Context, item: &ast::Item) { - if !attr::contains_name(item.attrs[], "automatically_derived") { + if !attr::contains_name(item.attrs.index(&FullRange), "automatically_derived") { return } let did = match item.node { @@ -669,6 +669,7 @@ impl LintPass for UnusedAttributes { // FIXME: #19470 this shouldn't be needed forever "old_orphan_check", + "old_impl_check", ]; static CRATE_ATTRS: &'static [&'static str] = &[ @@ -769,11 +770,11 @@ impl LintPass for UnusedResults { ty::ty_enum(did, _) => { if ast_util::is_local(did) { if let ast_map::NodeItem(it) = cx.tcx.map.get(did.node) { - warned |= check_must_use(cx, it.attrs[], s.span); + warned |= check_must_use(cx, it.attrs.index(&FullRange), s.span); } } else { csearch::get_item_attrs(&cx.sess().cstore, did, |attrs| { - warned |= check_must_use(cx, attrs[], s.span); + warned |= check_must_use(cx, attrs.index(&FullRange), s.span); }); } } @@ -795,7 +796,7 @@ impl LintPass for UnusedResults { msg.push_str(s.get()); } } - cx.span_lint(UNUSED_MUST_USE, sp, msg[]); + cx.span_lint(UNUSED_MUST_USE, sp, msg.index(&FullRange)); return true; } } @@ -841,7 +842,7 @@ impl NonCamelCaseTypes { } else { format!("{} `{}` should have a camel case name such as `{}`", sort, s, c) }; - cx.span_lint(NON_CAMEL_CASE_TYPES, span, m[]); + cx.span_lint(NON_CAMEL_CASE_TYPES, span, m.index(&FullRange)); } } } @@ -981,7 +982,7 @@ impl NonSnakeCase { if !is_snake_case(ident) { cx.span_lint(NON_SNAKE_CASE, span, format!("{} `{}` should have a snake case name such as `{}`", - sort, s, to_snake_case(s.get()))[]); + sort, s, to_snake_case(s.get())).index(&FullRange)); } } } @@ -1068,7 +1069,7 @@ impl LintPass for NonUpperCaseGlobals { format!("static constant `{}` should have an uppercase name \ such as `{}`", s.get(), s.get().chars().map(|c| 
c.to_uppercase()) - .collect::()[])[]); + .collect::().index(&FullRange)).index(&FullRange)); } } _ => {} @@ -1085,7 +1086,7 @@ impl LintPass for NonUpperCaseGlobals { format!("static constant in pattern `{}` should have an uppercase \ name such as `{}`", s.get(), s.get().chars().map(|c| c.to_uppercase()) - .collect::()[])[]); + .collect::().index(&FullRange)).index(&FullRange)); } } _ => {} @@ -1110,7 +1111,7 @@ impl UnusedParens { if !necessary { cx.span_lint(UNUSED_PARENS, value.span, format!("unnecessary parentheses around {}", - msg)[]) + msg).index(&FullRange)) } } @@ -1212,7 +1213,7 @@ impl LintPass for UnusedImportBraces { let m = format!("braces around {} is unnecessary", token::get_ident(*name).get()); cx.span_lint(UNUSED_IMPORT_BRACES, view_item.span, - m[]); + m.index(&FullRange)); }, _ => () } @@ -1251,7 +1252,7 @@ impl LintPass for NonShorthandFieldPatterns { if ident.node.as_str() == fieldpat.node.ident.as_str() { cx.span_lint(NON_SHORTHAND_FIELD_PATTERNS, fieldpat.span, format!("the `{}:` in this pattern is redundant and can \ - be removed", ident.node.as_str())[]) + be removed", ident.node.as_str()).index(&FullRange)) } } } @@ -1328,7 +1329,7 @@ impl UnusedMut { let ident = path1.node; if let ast::BindByValue(ast::MutMutable) = mode { if !token::get_ident(ident).get().starts_with("_") { - match mutables.entry(&ident.name.uint()) { + match mutables.entry(ident.name.uint()) { Vacant(entry) => { entry.insert(vec![id]); }, Occupied(mut entry) => { entry.get_mut().push(id); }, } @@ -1355,7 +1356,7 @@ impl LintPass for UnusedMut { fn check_expr(&mut self, cx: &Context, e: &ast::Expr) { if let ast::ExprMatch(_, ref arms, _) = e.node { for a in arms.iter() { - self.check_unused_mut_pat(cx, a.pats[]) + self.check_unused_mut_pat(cx, a.pats.index(&FullRange)) } } } @@ -1476,7 +1477,7 @@ impl MissingDoc { }); if !has_doc { cx.span_lint(MISSING_DOCS, sp, - format!("missing documentation for {}", desc)[]); + format!("missing documentation for {}", desc).index(&FullRange)); } } } @@ -1490,7 +1491,7 @@ impl LintPass for MissingDoc { let doc_hidden = self.doc_hidden() || attrs.iter().any(|attr| { attr.check_name("doc") && match attr.meta_item_list() { None => false, - Some(l) => attr::contains_name(l[], "hidden"), + Some(l) => attr::contains_name(l.index(&FullRange), "hidden"), } }); self.doc_hidden_stack.push(doc_hidden); @@ -1512,7 +1513,7 @@ impl LintPass for MissingDoc { } fn check_crate(&mut self, cx: &Context, krate: &ast::Crate) { - self.check_missing_docs_attrs(cx, None, krate.attrs[], + self.check_missing_docs_attrs(cx, None, krate.attrs.index(&FullRange), krate.span, "crate"); } @@ -1526,7 +1527,7 @@ impl LintPass for MissingDoc { ast::ItemTy(..) => "a type alias", _ => return }; - self.check_missing_docs_attrs(cx, Some(it.id), it.attrs[], + self.check_missing_docs_attrs(cx, Some(it.id), it.attrs.index(&FullRange), it.span, desc); } @@ -1539,13 +1540,13 @@ impl LintPass for MissingDoc { // Otherwise, doc according to privacy. This will also check // doc for default methods defined on traits. 
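// ---- Illustrative sketch, not part of the patch: the Vacant/Occupied match on
// `mutables.entry(...)` above groups node ids under one key; on a modern
// toolchain the whole match collapses into `or_default`. Types and names below
// are hypothetical stand-ins.
use std::collections::HashMap;

fn record_binding(by_name: &mut HashMap<usize, Vec<u32>>, name: usize, id: u32) {
    // was: match map.entry(key) { Vacant(e) => { e.insert(vec![id]); },
    //                             Occupied(mut e) => { e.get_mut().push(id); } }
    by_name.entry(name).or_default().push(id);
}

fn main() {
    let mut by_name = HashMap::new();
    record_binding(&mut by_name, 7, 1);
    record_binding(&mut by_name, 7, 2);
    assert_eq!(by_name[&7], vec![1, 2]);
}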
- self.check_missing_docs_attrs(cx, Some(m.id), m.attrs[], + self.check_missing_docs_attrs(cx, Some(m.id), m.attrs.index(&FullRange), m.span, "a method"); } } fn check_ty_method(&mut self, cx: &Context, tm: &ast::TypeMethod) { - self.check_missing_docs_attrs(cx, Some(tm.id), tm.attrs[], + self.check_missing_docs_attrs(cx, Some(tm.id), tm.attrs.index(&FullRange), tm.span, "a type method"); } @@ -1555,14 +1556,14 @@ impl LintPass for MissingDoc { let cur_struct_def = *self.struct_def_stack.last() .expect("empty struct_def_stack"); self.check_missing_docs_attrs(cx, Some(cur_struct_def), - sf.node.attrs[], sf.span, + sf.node.attrs.index(&FullRange), sf.span, "a struct field") } } } fn check_variant(&mut self, cx: &Context, v: &ast::Variant, _: &ast::Generics) { - self.check_missing_docs_attrs(cx, Some(v.node.id), v.node.attrs[], + self.check_missing_docs_attrs(cx, Some(v.node.id), v.node.attrs.index(&FullRange), v.span, "a variant"); assert!(!self.in_variant); self.in_variant = true; @@ -1629,7 +1630,6 @@ declare_lint! { Warn, "detects use of #[deprecated] items" } - // FIXME #6875: Change to Warn after std library stabilization is complete declare_lint! { EXPERIMENTAL, @@ -1674,7 +1674,7 @@ impl Stability { _ => format!("use of {} item", label) }; - cx.span_lint(lint, span, msg[]); + cx.span_lint(lint, span, msg.index(&FullRange)); } fn is_internal(&self, cx: &Context, span: Span) -> bool { @@ -1848,7 +1848,7 @@ declare_lint! { "detects transmutes of fat pointers" } -declare_lint!{ +declare_lint! { pub MISSING_COPY_IMPLEMENTATIONS, Warn, "detects potentially-forgotten implementations of `Copy`" diff --git a/src/librustc/lint/context.rs b/src/librustc/lint/context.rs index 5c0fd8944368f..51998bdbcf299 100644 --- a/src/librustc/lint/context.rs +++ b/src/librustc/lint/context.rs @@ -104,7 +104,7 @@ impl LintStore { } pub fn get_lints<'t>(&'t self) -> &'t [(&'static Lint, bool)] { - self.lints[] + self.lints.index(&FullRange) } pub fn get_lint_groups<'t>(&'t self) -> Vec<(&'static str, Vec, bool)> { @@ -124,11 +124,11 @@ impl LintStore { match (sess, from_plugin) { // We load builtin lints first, so a duplicate is a compiler bug. // Use early_error when handling -W help with no crate. - (None, _) => early_error(msg[]), - (Some(sess), false) => sess.bug(msg[]), + (None, _) => early_error(msg.index(&FullRange)), + (Some(sess), false) => sess.bug(msg.index(&FullRange)), // A duplicate name from a plugin is a user error. - (Some(sess), true) => sess.err(msg[]), + (Some(sess), true) => sess.err(msg.index(&FullRange)), } } @@ -149,11 +149,11 @@ impl LintStore { match (sess, from_plugin) { // We load builtin lints first, so a duplicate is a compiler bug. // Use early_error when handling -W help with no crate. - (None, _) => early_error(msg[]), - (Some(sess), false) => sess.bug(msg[]), + (None, _) => early_error(msg.index(&FullRange)), + (Some(sess), false) => sess.bug(msg.index(&FullRange)), // A duplicate name from a plugin is a user error. 
- (Some(sess), true) => sess.err(msg[]), + (Some(sess), true) => sess.err(msg.index(&FullRange)), } } } @@ -267,8 +267,8 @@ impl LintStore { let warning = format!("lint {} has been renamed to {}", lint_name, new_name); match span { - Some(span) => sess.span_warn(span, warning[]), - None => sess.warn(warning[]), + Some(span) => sess.span_warn(span, warning.index(&FullRange)), + None => sess.warn(warning.index(&FullRange)), }; Some(lint_id) } @@ -278,13 +278,13 @@ impl LintStore { pub fn process_command_line(&mut self, sess: &Session) { for &(ref lint_name, level) in sess.opts.lint_opts.iter() { - match self.find_lint(lint_name[], sess, None) { + match self.find_lint(lint_name.index(&FullRange), sess, None) { Some(lint_id) => self.set_level(lint_id, (level, CommandLine)), None => { match self.lint_groups.iter().map(|(&x, pair)| (x, pair.0.clone())) .collect::>>() - .get(lint_name[]) { + .get(lint_name.index(&FullRange)) { Some(v) => { v.iter() .map(|lint_id: &LintId| @@ -292,7 +292,7 @@ impl LintStore { .collect::>(); } None => sess.err(format!("unknown {} flag: {}", - level.as_str(), lint_name)[]), + level.as_str(), lint_name).index(&FullRange)), } } } @@ -403,10 +403,10 @@ pub fn raw_emit_lint(sess: &Session, lint: &'static Lint, if level == Forbid { level = Deny; } match (level, span) { - (Warn, Some(sp)) => sess.span_warn(sp, msg[]), - (Warn, None) => sess.warn(msg[]), - (Deny, Some(sp)) => sess.span_err(sp, msg[]), - (Deny, None) => sess.err(msg[]), + (Warn, Some(sp)) => sess.span_warn(sp, msg.index(&FullRange)), + (Warn, None) => sess.warn(msg.index(&FullRange)), + (Deny, Some(sp)) => sess.span_err(sp, msg.index(&FullRange)), + (Deny, None) => sess.err(msg.index(&FullRange)), _ => sess.bug("impossible level in raw_emit_lint"), } @@ -499,7 +499,7 @@ impl<'a, 'tcx> Context<'a, 'tcx> { None => { self.span_lint(builtin::UNKNOWN_LINTS, span, format!("unknown `{}` attribute: `{}`", - level.as_str(), lint_name)[]); + level.as_str(), lint_name).as_slice()); continue; } } @@ -515,7 +515,7 @@ impl<'a, 'tcx> Context<'a, 'tcx> { self.tcx.sess.span_err(span, format!("{}({}) overruled by outer forbid({})", level.as_str(), lint_name, - lint_name)[]); + lint_name).index(&FullRange)); } else if now != level { let src = self.lints.get_level_source(lint_id).1; self.level_stack.push((lint_id, (now, src))); @@ -550,7 +550,7 @@ impl<'a, 'tcx> Context<'a, 'tcx> { impl<'a, 'tcx, 'v> Visitor<'v> for Context<'a, 'tcx> { fn visit_item(&mut self, it: &ast::Item) { - self.with_lint_attrs(it.attrs[], |cx| { + self.with_lint_attrs(it.attrs.index(&FullRange), |cx| { run_lints!(cx, check_item, it); cx.visit_ids(|v| v.visit_item(it)); visit::walk_item(cx, it); @@ -558,14 +558,14 @@ impl<'a, 'tcx, 'v> Visitor<'v> for Context<'a, 'tcx> { } fn visit_foreign_item(&mut self, it: &ast::ForeignItem) { - self.with_lint_attrs(it.attrs[], |cx| { + self.with_lint_attrs(it.attrs.index(&FullRange), |cx| { run_lints!(cx, check_foreign_item, it); visit::walk_foreign_item(cx, it); }) } fn visit_view_item(&mut self, i: &ast::ViewItem) { - self.with_lint_attrs(i.attrs[], |cx| { + self.with_lint_attrs(i.attrs.index(&FullRange), |cx| { run_lints!(cx, check_view_item, i); cx.visit_ids(|v| v.visit_view_item(i)); visit::walk_view_item(cx, i); @@ -591,7 +591,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for Context<'a, 'tcx> { body: &'v ast::Block, span: Span, id: ast::NodeId) { match fk { visit::FkMethod(_, _, m) => { - self.with_lint_attrs(m.attrs[], |cx| { + self.with_lint_attrs(m.attrs.index(&FullRange), |cx| { run_lints!(cx, check_fn, fk, decl, 
body, span, id); cx.visit_ids(|v| { v.visit_fn(fk, decl, body, span, id); @@ -607,7 +607,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for Context<'a, 'tcx> { } fn visit_ty_method(&mut self, t: &ast::TypeMethod) { - self.with_lint_attrs(t.attrs[], |cx| { + self.with_lint_attrs(t.attrs.index(&FullRange), |cx| { run_lints!(cx, check_ty_method, t); visit::walk_ty_method(cx, t); }) @@ -624,14 +624,14 @@ impl<'a, 'tcx, 'v> Visitor<'v> for Context<'a, 'tcx> { } fn visit_struct_field(&mut self, s: &ast::StructField) { - self.with_lint_attrs(s.node.attrs[], |cx| { + self.with_lint_attrs(s.node.attrs.index(&FullRange), |cx| { run_lints!(cx, check_struct_field, s); visit::walk_struct_field(cx, s); }) } fn visit_variant(&mut self, v: &ast::Variant, g: &ast::Generics) { - self.with_lint_attrs(v.node.attrs[], |cx| { + self.with_lint_attrs(v.node.attrs.index(&FullRange), |cx| { run_lints!(cx, check_variant, v, g); visit::walk_variant(cx, v, g); run_lints!(cx, check_variant_post, v, g); @@ -725,7 +725,7 @@ impl<'a, 'tcx> IdVisitingOperation for Context<'a, 'tcx> { None => {} Some(lints) => { for (lint_id, span, msg) in lints.into_iter() { - self.span_lint(lint_id.lint, span, msg[]) + self.span_lint(lint_id.lint, span, msg.index(&FullRange)) } } } @@ -771,7 +771,7 @@ pub fn check_crate(tcx: &ty::ctxt, let mut cx = Context::new(tcx, krate, exported_items); // Visit the whole crate. - cx.with_lint_attrs(krate.attrs[], |cx| { + cx.with_lint_attrs(krate.attrs.index(&FullRange), |cx| { cx.visit_id(ast::CRATE_NODE_ID); cx.visit_ids(|v| { v.visited_outermost = true; @@ -791,7 +791,7 @@ pub fn check_crate(tcx: &ty::ctxt, for &(lint, span, ref msg) in v.iter() { tcx.sess.span_bug(span, format!("unprocessed lint {} at {}: {}", - lint.as_str(), tcx.map.node_to_string(*id), *msg)[]) + lint.as_str(), tcx.map.node_to_string(*id), *msg).as_slice()) } } diff --git a/src/librustc/metadata/creader.rs b/src/librustc/metadata/creader.rs index 171bfd74a816b..66967a735460f 100644 --- a/src/librustc/metadata/creader.rs +++ b/src/librustc/metadata/creader.rs @@ -20,16 +20,13 @@ use metadata::cstore::{CStore, CrateSource, MetadataBlob}; use metadata::decoder; use metadata::loader; use metadata::loader::CratePaths; -use util::nodemap::FnvHashMap; use std::rc::Rc; -use std::collections::hash_map::Entry::{Occupied, Vacant}; use syntax::ast; use syntax::abi; use syntax::attr; use syntax::attr::AttrMetaMethods; use syntax::codemap::{Span, mk_sp}; -use syntax::diagnostic::SpanHandler; use syntax::parse; use syntax::parse::token::InternedString; use syntax::parse::token; @@ -67,29 +64,9 @@ fn dump_crates(cstore: &CStore) { }) } -fn warn_if_multiple_versions(diag: &SpanHandler, cstore: &CStore) { - let mut map = FnvHashMap::new(); - cstore.iter_crate_data(|cnum, data| { - match map.entry(&data.name()) { - Vacant(entry) => { entry.insert(vec![cnum]); }, - Occupied(mut entry) => { entry.get_mut().push(cnum); }, - } - }); - - for (name, dupes) in map.into_iter() { - if dupes.len() == 1 { continue } - diag.handler().warn( - format!("using multiple versions of crate `{}`", name)[]); - for dupe in dupes.into_iter() { - let data = cstore.get_crate_data(dupe); - diag.span_note(data.span, "used here"); - loader::note_crate_name(diag, data.name()[]); - } - } -} - fn should_link(i: &ast::ViewItem) -> bool { - !attr::contains_name(i.attrs[], "no_link") + !attr::contains_name(i.attrs.index(&FullRange), "no_link") + } struct CrateInfo { @@ -113,7 +90,7 @@ pub fn validate_crate_name(sess: Option<&Session>, s: &str, sp: Option) { for c in s.chars() { if 
c.is_alphanumeric() { continue } if c == '_' || c == '-' { continue } - err(format!("invalid character `{}` in crate name: `{}`", c, s)[]); + err(format!("invalid character `{}` in crate name: `{}`", c, s).index(&FullRange)); } match sess { Some(sess) => sess.abort_if_errors(), @@ -121,6 +98,7 @@ pub fn validate_crate_name(sess: Option<&Session>, s: &str, sp: Option) { } } + fn register_native_lib(sess: &Session, span: Option, name: String, @@ -188,7 +166,6 @@ impl<'a> CrateReader<'a> { if log_enabled!(log::DEBUG) { dump_crates(&self.sess.cstore); } - warn_if_multiple_versions(self.sess.diagnostic(), &self.sess.cstore); for &(ref name, kind) in self.sess.opts.libs.iter() { register_native_lib(self.sess, None, name.clone(), kind); @@ -212,8 +189,8 @@ impl<'a> CrateReader<'a> { match self.extract_crate_info(i) { Some(info) => { let (cnum, _, _) = self.resolve_crate(&None, - info.ident[], - info.name[], + info.ident.index(&FullRange), + info.name.index(&FullRange), None, i.span, PathKind::Crate); @@ -227,12 +204,12 @@ impl<'a> CrateReader<'a> { match i.node { ast::ViewItemExternCrate(ident, ref path_opt, id) => { let ident = token::get_ident(ident); - debug!("resolving extern crate stmt. ident: {} path_opt: {}", + debug!("resolving extern crate stmt. ident: {} path_opt: {:?}", ident, path_opt); let name = match *path_opt { Some((ref path_str, _)) => { let name = path_str.get().to_string(); - validate_crate_name(Some(self.sess), name[], + validate_crate_name(Some(self.sess), name.index(&FullRange), Some(i.span)); name } @@ -299,7 +276,7 @@ impl<'a> CrateReader<'a> { } else { self.sess.span_err(m.span, format!("unknown kind: `{}`", - k)[]); + k).index(&FullRange)); cstore::NativeUnknown } } @@ -353,7 +330,7 @@ impl<'a> CrateReader<'a> { match self.sess.opts.externs.get(name) { Some(locs) => { let found = locs.iter().any(|l| { - let l = fs::realpath(&Path::new(l[])).ok(); + let l = fs::realpath(&Path::new(l.index(&FullRange))).ok(); l == source.dylib || l == source.rlib }); if found { @@ -432,7 +409,7 @@ impl<'a> CrateReader<'a> { crate_name: name, hash: hash.map(|a| &*a), filesearch: self.sess.target_filesearch(kind), - triple: self.sess.opts.target_triple[], + triple: self.sess.opts.target_triple.index(&FullRange), root: root, rejected_via_hash: vec!(), rejected_via_triple: vec!(), @@ -458,8 +435,8 @@ impl<'a> CrateReader<'a> { decoder::get_crate_deps(cdata).iter().map(|dep| { debug!("resolving dep crate {} hash: `{}`", dep.name, dep.hash); let (local_cnum, _, _) = self.resolve_crate(root, - dep.name[], - dep.name[], + dep.name.index(&FullRange), + dep.name.index(&FullRange), Some(&dep.hash), span, PathKind::Dependency); @@ -470,7 +447,7 @@ impl<'a> CrateReader<'a> { pub fn read_plugin_metadata<'b>(&'b mut self, vi: &'b ast::ViewItem) -> PluginMetadata<'b> { let info = self.extract_crate_info(vi).unwrap(); - let target_triple = self.sess.opts.target_triple[]; + let target_triple = self.sess.opts.target_triple.index(&FullRange); let is_cross = target_triple != config::host_triple(); let mut should_link = info.should_link && !is_cross; let mut target_only = false; @@ -479,8 +456,8 @@ impl<'a> CrateReader<'a> { let mut load_ctxt = loader::Context { sess: self.sess, span: vi.span, - ident: ident[], - crate_name: name[], + ident: ident.index(&FullRange), + crate_name: name.index(&FullRange), hash: None, filesearch: self.sess.host_filesearch(PathKind::Crate), triple: config::host_triple(), @@ -505,11 +482,11 @@ impl<'a> CrateReader<'a> { }; let dylib = library.dylib.clone(); - let register = 
should_link && self.existing_match(info.name[], None).is_none(); + let register = should_link && self.existing_match(info.name.as_slice(), None).is_none(); let metadata = if register { // Register crate now to avoid double-reading metadata - let (_, cmd, _) = self.register_crate(&None, info.ident[], - info.name[], vi.span, library); + let (_, cmd, _) = self.register_crate(&None, info.ident.index(&FullRange), + info.name.index(&FullRange), vi.span, library); PMDSource::Registered(cmd) } else { // Not registering the crate; just hold on to the metadata @@ -530,8 +507,8 @@ impl<'a> CrateReader<'a> { impl<'a> PluginMetadata<'a> { /// Read exported macros pub fn exported_macros(&self) -> Vec { - let imported_from = Some(token::intern(self.info.ident[]).ident()); - let source_name = format!("<{} macros>", self.info.ident[]); + let imported_from = Some(token::intern(self.info.ident.index(&FullRange)).ident()); + let source_name = format!("<{} macros>", self.info.ident.index(&FullRange)); let mut macros = vec![]; decoder::each_exported_macro(self.metadata.as_slice(), &*self.sess.cstore.intr, @@ -573,7 +550,7 @@ impl<'a> PluginMetadata<'a> { self.info.ident, config::host_triple(), self.sess.opts.target_triple); - self.sess.span_err(self.vi_span, message[]); + self.sess.span_err(self.vi_span, message.index(&FullRange)); self.sess.abort_if_errors(); } @@ -586,7 +563,7 @@ impl<'a> PluginMetadata<'a> { let message = format!("plugin crate `{}` only found in rlib format, \ but must be available in dylib format", self.info.ident); - self.sess.span_err(self.vi_span, message[]); + self.sess.span_err(self.vi_span, message.index(&FullRange)); // No need to abort because the loading code will just ignore this // empty dylib. None diff --git a/src/librustc/metadata/csearch.rs b/src/librustc/metadata/csearch.rs index 1401a7d4a1a6e..72ce61b133a2b 100644 --- a/src/librustc/metadata/csearch.rs +++ b/src/librustc/metadata/csearch.rs @@ -95,7 +95,7 @@ pub fn get_item_path(tcx: &ty::ctxt, def: ast::DefId) -> Vec // FIXME #1920: This path is not always correct if the crate is not linked // into the root namespace. 
- let mut r = vec![ast_map::PathMod(token::intern(cdata.name[]))]; + let mut r = vec![ast_map::PathMod(token::intern(cdata.name.index(&FullRange)))]; r.push_all(path.as_slice()); r } @@ -246,13 +246,13 @@ pub fn get_field_type<'tcx>(tcx: &ty::ctxt<'tcx>, class_id: ast::DefId, let class_doc = expect(tcx.sess.diagnostic(), decoder::maybe_find_item(class_id.node, all_items), || { - (format!("get_field_type: class ID {} not found", + (format!("get_field_type: class ID {:?} not found", class_id)).to_string() }); let the_field = expect(tcx.sess.diagnostic(), decoder::maybe_find_item(def.node, class_doc), || { - (format!("get_field_type: in class {}, field ID {} not found", + (format!("get_field_type: in class {:?}, field ID {:?} not found", class_id, def)).to_string() }); diff --git a/src/librustc/metadata/decoder.rs b/src/librustc/metadata/decoder.rs index ed0a1f6211b16..9e71c867efa09 100644 --- a/src/librustc/metadata/decoder.rs +++ b/src/librustc/metadata/decoder.rs @@ -75,7 +75,7 @@ fn lookup_hash<'a, F>(d: rbml::Doc<'a>, mut eq_fn: F, hash: u64) -> Option(item_id: ast::NodeId, items: rbml::Doc<'a>) -> Option> { fn eq_item(bytes: &[u8], item_id: ast::NodeId) -> bool { return u64_from_be_bytes( - bytes[0u..4u], 0u, 4u) as ast::NodeId + bytes.index(&(0u..4u)), 0u, 4u) as ast::NodeId == item_id; } lookup_hash(items, @@ -675,14 +675,14 @@ pub fn maybe_get_item_ast<'tcx>(cdata: Cmd, tcx: &ty::ctxt<'tcx>, id: ast::NodeI debug!("Looking up item: {}", id); let item_doc = lookup_item(id, cdata.data()); let path = item_path(item_doc).init().to_vec(); - match decode_inlined_item.call_mut((cdata, tcx, path, item_doc)) { + match decode_inlined_item(cdata, tcx, path, item_doc) { Ok(ii) => csearch::found(ii), Err(path) => { match item_parent_item(item_doc) { Some(did) => { let did = translate_def_id(cdata, did); let parent_item = lookup_item(did.node, cdata.data()); - match decode_inlined_item.call_mut((cdata, tcx, path, parent_item)) { + match decode_inlined_item(cdata, tcx, path, parent_item) { Ok(ii) => csearch::found_parent(did, ii), Err(_) => csearch::not_found } @@ -1191,7 +1191,7 @@ pub fn get_crate_deps(data: &[u8]) -> Vec { } reader::tagged_docs(depsdoc, tag_crate_dep, |depdoc| { let name = docstr(depdoc, tag_crate_dep_crate_name); - let hash = Svh::new(docstr(depdoc, tag_crate_dep_hash)[]); + let hash = Svh::new(docstr(depdoc, tag_crate_dep_hash).index(&FullRange)); deps.push(CrateDep { cnum: crate_num, name: name, diff --git a/src/librustc/metadata/encoder.rs b/src/librustc/metadata/encoder.rs index e4226ddde85b6..28ad36194ef9a 100644 --- a/src/librustc/metadata/encoder.rs +++ b/src/librustc/metadata/encoder.rs @@ -95,7 +95,7 @@ fn encode_impl_type_basename(rbml_w: &mut Encoder, name: ast::Ident) { } pub fn encode_def_id(rbml_w: &mut Encoder, id: DefId) { - rbml_w.wr_tagged_str(tag_def_id, def_to_string(id)[]); + rbml_w.wr_tagged_str(tag_def_id, def_to_string(id).index(&FullRange)); } #[derive(Clone)] @@ -154,7 +154,7 @@ fn encode_variant_id(rbml_w: &mut Encoder, vid: DefId) { rbml_w.end_tag(); rbml_w.start_tag(tag_mod_child); - rbml_w.wr_str(s[]); + rbml_w.wr_str(s.index(&FullRange)); rbml_w.end_tag(); } @@ -264,7 +264,7 @@ fn encode_symbol(ecx: &EncodeContext, } None => { ecx.diag.handler().bug( - format!("encode_symbol: id not found {}", id)[]); + format!("encode_symbol: id not found {}", id).index(&FullRange)); } } rbml_w.end_tag(); @@ -332,8 +332,8 @@ fn encode_enum_variant_info(ecx: &EncodeContext, encode_name(rbml_w, variant.node.name.name); encode_parent_item(rbml_w, local_def(id)); 
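// ---- Illustrative sketch, not part of the patch: the change above from
// `decode_inlined_item.call_mut((cdata, tcx, path, item_doc))` to a plain
// `decode_inlined_item(cdata, tcx, path, item_doc)` is the unboxed-closure call
// sugar: `f(a, b)` desugars to the appropriate `Fn`/`FnMut`/`FnOnce` call
// method. A hypothetical reduced form:
fn apply_twice<F: FnMut(i32) -> i32>(mut f: F, x: i32) -> i32 {
    let once = f(x); // on the old compiler this was spelled f.call_mut((x,))
    f(once)
}

fn main() {
    assert_eq!(apply_twice(|n| n + 1, 0), 2);
}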
encode_visibility(rbml_w, variant.node.vis); - encode_attributes(rbml_w, variant.node.attrs[]); - encode_repr_attrs(rbml_w, ecx, variant.node.attrs[]); + encode_attributes(rbml_w, variant.node.attrs.index(&FullRange)); + encode_repr_attrs(rbml_w, ecx, variant.node.attrs.index(&FullRange)); let stab = stability::lookup(ecx.tcx, ast_util::local_def(variant.node.id)); encode_stability(rbml_w, stab); @@ -344,9 +344,9 @@ fn encode_enum_variant_info(ecx: &EncodeContext, let fields = ty::lookup_struct_fields(ecx.tcx, def_id); let idx = encode_info_for_struct(ecx, rbml_w, - fields[], + fields.index(&FullRange), index); - encode_struct_fields(rbml_w, fields[], def_id); + encode_struct_fields(rbml_w, fields.index(&FullRange), def_id); encode_index(rbml_w, idx, write_i64); } } @@ -386,12 +386,12 @@ fn encode_reexported_static_method(rbml_w: &mut Encoder, exp.name, token::get_name(method_name)); rbml_w.start_tag(tag_items_data_item_reexport); rbml_w.start_tag(tag_items_data_item_reexport_def_id); - rbml_w.wr_str(def_to_string(method_def_id)[]); + rbml_w.wr_str(def_to_string(method_def_id).index(&FullRange)); rbml_w.end_tag(); rbml_w.start_tag(tag_items_data_item_reexport_name); rbml_w.wr_str(format!("{}::{}", exp.name, - token::get_name(method_name))[]); + token::get_name(method_name)).index(&FullRange)); rbml_w.end_tag(); rbml_w.end_tag(); } @@ -529,7 +529,7 @@ fn encode_reexports(ecx: &EncodeContext, id); rbml_w.start_tag(tag_items_data_item_reexport); rbml_w.start_tag(tag_items_data_item_reexport_def_id); - rbml_w.wr_str(def_to_string(exp.def_id)[]); + rbml_w.wr_str(def_to_string(exp.def_id).index(&FullRange)); rbml_w.end_tag(); rbml_w.start_tag(tag_items_data_item_reexport_name); rbml_w.wr_str(exp.name.as_str()); @@ -562,13 +562,13 @@ fn encode_info_for_mod(ecx: &EncodeContext, // Encode info about all the module children. 
for item in md.items.iter() { rbml_w.start_tag(tag_mod_child); - rbml_w.wr_str(def_to_string(local_def(item.id))[]); + rbml_w.wr_str(def_to_string(local_def(item.id)).index(&FullRange)); rbml_w.end_tag(); each_auxiliary_node_id(&**item, |auxiliary_node_id| { rbml_w.start_tag(tag_mod_child); rbml_w.wr_str(def_to_string(local_def( - auxiliary_node_id))[]); + auxiliary_node_id)).index(&FullRange)); rbml_w.end_tag(); true }); @@ -580,7 +580,7 @@ fn encode_info_for_mod(ecx: &EncodeContext, did, ecx.tcx.map.node_to_string(did)); rbml_w.start_tag(tag_mod_impl); - rbml_w.wr_str(def_to_string(local_def(did))[]); + rbml_w.wr_str(def_to_string(local_def(did)).index(&FullRange)); rbml_w.end_tag(); } } @@ -615,7 +615,7 @@ fn encode_visibility(rbml_w: &mut Encoder, visibility: ast::Visibility) { ast::Public => 'y', ast::Inherited => 'i', }; - rbml_w.wr_str(ch.to_string()[]); + rbml_w.wr_str(ch.to_string().index(&FullRange)); rbml_w.end_tag(); } @@ -627,7 +627,7 @@ fn encode_unboxed_closure_kind(rbml_w: &mut Encoder, ty::FnMutUnboxedClosureKind => 'm', ty::FnOnceUnboxedClosureKind => 'o', }; - rbml_w.wr_str(ch.to_string()[]); + rbml_w.wr_str(ch.to_string().index(&FullRange)); rbml_w.end_tag(); } @@ -788,7 +788,7 @@ fn encode_generics<'a, 'tcx>(rbml_w: &mut Encoder, rbml_w.end_tag(); rbml_w.wr_tagged_str(tag_region_param_def_def_id, - def_to_string(param.def_id)[]); + def_to_string(param.def_id).index(&FullRange)); rbml_w.wr_tagged_u64(tag_region_param_def_space, param.space.to_uint() as u64); @@ -845,7 +845,7 @@ fn encode_info_for_method<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>, parent_id: NodeId, ast_item_opt: Option<&ast::ImplItem>) { - debug!("encode_info_for_method: {} {}", m.def_id, + debug!("encode_info_for_method: {:?} {:?}", m.def_id, token::get_name(m.name)); rbml_w.start_tag(tag_items_data_item); @@ -864,9 +864,9 @@ fn encode_info_for_method<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>, encode_path(rbml_w, impl_path.chain(Some(elem).into_iter())); match ast_item_opt { Some(&ast::MethodImplItem(ref ast_method)) => { - encode_attributes(rbml_w, ast_method.attrs[]); + encode_attributes(rbml_w, ast_method.attrs.index(&FullRange)); let any_types = !pty.generics.types.is_empty(); - if any_types || is_default_impl || should_inline(ast_method.attrs[]) { + if any_types || is_default_impl || should_inline(ast_method.attrs.index(&FullRange)) { encode_inlined_item(ecx, rbml_w, IIImplItemRef(local_def(parent_id), ast_item_opt.unwrap())); } @@ -887,7 +887,7 @@ fn encode_info_for_associated_type(ecx: &EncodeContext, impl_path: PathElems, parent_id: NodeId, typedef_opt: Option>) { - debug!("encode_info_for_associated_type({},{})", + debug!("encode_info_for_associated_type({:?},{:?})", associated_type.def_id, token::get_name(associated_type.name)); @@ -912,7 +912,7 @@ fn encode_info_for_associated_type(ecx: &EncodeContext, match typedef_opt { None => {} Some(typedef) => { - encode_attributes(rbml_w, typedef.attrs[]); + encode_attributes(rbml_w, typedef.attrs.index(&FullRange)); encode_type(ecx, rbml_w, ty::node_id_to_type(ecx.tcx, typedef.id)); } @@ -953,7 +953,7 @@ fn encode_inlined_item(ecx: &EncodeContext, ii: InlinedItemRef) { let mut eii = ecx.encode_inlined_item.borrow_mut(); let eii: &mut EncodeInlinedItem = &mut *eii; - eii.call_mut((ecx, rbml_w, ii)) + eii(ecx, rbml_w, ii) } const FN_FAMILY: char = 'f'; @@ -1046,7 +1046,7 @@ fn encode_info_for_item(ecx: &EncodeContext, encode_path(rbml_w, path); encode_visibility(rbml_w, vis); encode_stability(rbml_w, stab); - encode_attributes(rbml_w, item.attrs[]); + 
encode_attributes(rbml_w, item.attrs.index(&FullRange)); rbml_w.end_tag(); } ast::ItemConst(_, _) => { @@ -1072,8 +1072,8 @@ fn encode_info_for_item(ecx: &EncodeContext, encode_bounds_and_type(rbml_w, ecx, &lookup_item_type(tcx, def_id)); encode_name(rbml_w, item.ident.name); encode_path(rbml_w, path); - encode_attributes(rbml_w, item.attrs[]); - if tps_len > 0u || should_inline(item.attrs[]) { + encode_attributes(rbml_w, item.attrs.index(&FullRange)); + if tps_len > 0u || should_inline(item.attrs.index(&FullRange)) { encode_inlined_item(ecx, rbml_w, IIItemRef(item)); } if tps_len == 0 { @@ -1089,7 +1089,7 @@ fn encode_info_for_item(ecx: &EncodeContext, encode_info_for_mod(ecx, rbml_w, m, - item.attrs[], + item.attrs.index(&FullRange), item.id, path, item.ident, @@ -1106,7 +1106,7 @@ fn encode_info_for_item(ecx: &EncodeContext, // Encode all the items in this module. for foreign_item in fm.items.iter() { rbml_w.start_tag(tag_mod_child); - rbml_w.wr_str(def_to_string(local_def(foreign_item.id))[]); + rbml_w.wr_str(def_to_string(local_def(foreign_item.id)).index(&FullRange)); rbml_w.end_tag(); } encode_visibility(rbml_w, vis); @@ -1134,8 +1134,8 @@ fn encode_info_for_item(ecx: &EncodeContext, encode_item_variances(rbml_w, ecx, item.id); encode_bounds_and_type(rbml_w, ecx, &lookup_item_type(tcx, def_id)); encode_name(rbml_w, item.ident.name); - encode_attributes(rbml_w, item.attrs[]); - encode_repr_attrs(rbml_w, ecx, item.attrs[]); + encode_attributes(rbml_w, item.attrs.index(&FullRange)); + encode_repr_attrs(rbml_w, ecx, item.attrs.index(&FullRange)); for v in (*enum_definition).variants.iter() { encode_variant_id(rbml_w, local_def(v.node.id)); } @@ -1152,7 +1152,7 @@ fn encode_info_for_item(ecx: &EncodeContext, encode_enum_variant_info(ecx, rbml_w, item.id, - (*enum_definition).variants[], + (*enum_definition).variants.index(&FullRange), index); } ast::ItemStruct(ref struct_def, _) => { @@ -1164,7 +1164,7 @@ fn encode_info_for_item(ecx: &EncodeContext, class itself */ let idx = encode_info_for_struct(ecx, rbml_w, - fields[], + fields.index(&FullRange), index); /* Index the class*/ @@ -1178,16 +1178,16 @@ fn encode_info_for_item(ecx: &EncodeContext, encode_item_variances(rbml_w, ecx, item.id); encode_name(rbml_w, item.ident.name); - encode_attributes(rbml_w, item.attrs[]); + encode_attributes(rbml_w, item.attrs.index(&FullRange)); encode_path(rbml_w, path.clone()); encode_stability(rbml_w, stab); encode_visibility(rbml_w, vis); - encode_repr_attrs(rbml_w, ecx, item.attrs[]); + encode_repr_attrs(rbml_w, ecx, item.attrs.index(&FullRange)); /* Encode def_ids for each field and method for methods, write all the stuff get_trait_method needs to know*/ - encode_struct_fields(rbml_w, fields[], def_id); + encode_struct_fields(rbml_w, fields.index(&FullRange), def_id); encode_inlined_item(ecx, rbml_w, IIItemRef(item)); @@ -1219,7 +1219,7 @@ fn encode_info_for_item(ecx: &EncodeContext, encode_family(rbml_w, 'i'); encode_bounds_and_type(rbml_w, ecx, &lookup_item_type(tcx, def_id)); encode_name(rbml_w, item.ident.name); - encode_attributes(rbml_w, item.attrs[]); + encode_attributes(rbml_w, item.attrs.index(&FullRange)); encode_unsafety(rbml_w, unsafety); encode_polarity(rbml_w, polarity); match ty.node { @@ -1323,7 +1323,7 @@ fn encode_info_for_item(ecx: &EncodeContext, encode_generics(rbml_w, ecx, &trait_def.generics, tag_item_generics); encode_trait_ref(rbml_w, ecx, &*trait_def.trait_ref, tag_item_trait_ref); encode_name(rbml_w, item.ident.name); - encode_attributes(rbml_w, item.attrs[]); + 
encode_attributes(rbml_w, item.attrs.index(&FullRange)); encode_visibility(rbml_w, vis); encode_stability(rbml_w, stab); for &method_def_id in ty::trait_item_def_ids(tcx, def_id).iter() { @@ -1341,7 +1341,7 @@ fn encode_info_for_item(ecx: &EncodeContext, rbml_w.end_tag(); rbml_w.start_tag(tag_mod_child); - rbml_w.wr_str(def_to_string(method_def_id.def_id())[]); + rbml_w.wr_str(def_to_string(method_def_id.def_id()).index(&FullRange)); rbml_w.end_tag(); } encode_path(rbml_w, path.clone()); @@ -1433,14 +1433,14 @@ fn encode_info_for_item(ecx: &EncodeContext, }; match trait_item { &ast::RequiredMethod(ref m) => { - encode_attributes(rbml_w, m.attrs[]); + encode_attributes(rbml_w, m.attrs.index(&FullRange)); encode_trait_item(rbml_w); encode_item_sort(rbml_w, 'r'); encode_method_argument_names(rbml_w, &*m.decl); } &ast::ProvidedMethod(ref m) => { - encode_attributes(rbml_w, m.attrs[]); + encode_attributes(rbml_w, m.attrs.index(&FullRange)); encode_trait_item(rbml_w); encode_item_sort(rbml_w, 'p'); encode_inlined_item(ecx, rbml_w, IITraitItemRef(def_id, trait_item)); @@ -1449,7 +1449,7 @@ fn encode_info_for_item(ecx: &EncodeContext, &ast::TypeTraitItem(ref associated_type) => { encode_attributes(rbml_w, - associated_type.attrs[]); + associated_type.attrs.index(&FullRange)); encode_item_sort(rbml_w, 't'); } } @@ -1603,7 +1603,7 @@ fn encode_index(rbml_w: &mut Encoder, index: Vec>, mut write_fn: let mut buckets: Vec>> = range(0, 256u16).map(|_| Vec::new()).collect(); for elt in index.into_iter() { let h = hash::hash(&elt.val) as uint; - buckets[h % 256].push(elt); + (&mut buckets[h % 256]).push(elt); } rbml_w.start_tag(tag_index); @@ -1826,10 +1826,10 @@ fn encode_macro_defs(rbml_w: &mut Encoder, rbml_w.start_tag(tag_macro_def); encode_name(rbml_w, def.ident.name); - encode_attributes(rbml_w, def.attrs[]); + encode_attributes(rbml_w, def.attrs.index(&FullRange)); rbml_w.start_tag(tag_macro_def_body); - rbml_w.wr_str(pprust::tts_to_string(def.body[])[]); + rbml_w.wr_str(pprust::tts_to_string(def.body.index(&FullRange)).index(&FullRange)); rbml_w.end_tag(); rbml_w.end_tag(); @@ -1869,7 +1869,7 @@ fn encode_struct_field_attrs(rbml_w: &mut Encoder, krate: &ast::Crate) { fn visit_struct_field(&mut self, field: &ast::StructField) { self.rbml_w.start_tag(tag_struct_field); self.rbml_w.wr_tagged_u32(tag_struct_field_id, field.node.id); - encode_attributes(self.rbml_w, field.node.attrs[]); + encode_attributes(self.rbml_w, field.node.attrs.index(&FullRange)); self.rbml_w.end_tag(); } } @@ -1941,13 +1941,13 @@ fn encode_misc_info(ecx: &EncodeContext, rbml_w.start_tag(tag_misc_info_crate_items); for item in krate.module.items.iter() { rbml_w.start_tag(tag_mod_child); - rbml_w.wr_str(def_to_string(local_def(item.id))[]); + rbml_w.wr_str(def_to_string(local_def(item.id)).index(&FullRange)); rbml_w.end_tag(); each_auxiliary_node_id(&**item, |auxiliary_node_id| { rbml_w.start_tag(tag_mod_child); rbml_w.wr_str(def_to_string(local_def( - auxiliary_node_id))[]); + auxiliary_node_id)).index(&FullRange)); rbml_w.end_tag(); true }); @@ -2116,17 +2116,17 @@ fn encode_metadata_inner(wr: &mut SeekableMemWriter, let mut rbml_w = writer::Encoder::new(wr); - encode_crate_name(&mut rbml_w, ecx.link_meta.crate_name[]); + encode_crate_name(&mut rbml_w, ecx.link_meta.crate_name.index(&FullRange)); encode_crate_triple(&mut rbml_w, tcx.sess .opts .target_triple - []); + .index(&FullRange)); encode_hash(&mut rbml_w, &ecx.link_meta.crate_hash); encode_dylib_dependency_formats(&mut rbml_w, &ecx); let mut i = 
rbml_w.writer.tell().unwrap(); - encode_attributes(&mut rbml_w, krate.attrs[]); + encode_attributes(&mut rbml_w, krate.attrs.index(&FullRange)); stats.attr_bytes = rbml_w.writer.tell().unwrap() - i; i = rbml_w.writer.tell().unwrap(); diff --git a/src/librustc/metadata/loader.rs b/src/librustc/metadata/loader.rs index 7c0645b4ca204..0fa9472287caa 100644 --- a/src/librustc/metadata/loader.rs +++ b/src/librustc/metadata/loader.rs @@ -315,14 +315,14 @@ impl<'a> Context<'a> { &Some(ref r) => format!("{} which `{}` depends on", message, r.ident) }; - self.sess.span_err(self.span, message[]); + self.sess.span_err(self.span, message.index(&FullRange)); if self.rejected_via_triple.len() > 0 { let mismatches = self.rejected_via_triple.iter(); for (i, &CrateMismatch{ ref path, ref got }) in mismatches.enumerate() { self.sess.fileline_note(self.span, format!("crate `{}`, path #{}, triple {}: {}", - self.ident, i+1, got, path.display())[]); + self.ident, i+1, got, path.display()).index(&FullRange)); } } if self.rejected_via_hash.len() > 0 { @@ -332,7 +332,7 @@ impl<'a> Context<'a> { for (i, &CrateMismatch{ ref path, .. }) in mismatches.enumerate() { self.sess.fileline_note(self.span, format!("crate `{}` path {}{}: {}", - self.ident, "#", i+1, path.display())[]); + self.ident, "#", i+1, path.display()).index(&FullRange)); } match self.root { &None => {} @@ -340,7 +340,7 @@ impl<'a> Context<'a> { for (i, path) in r.paths().iter().enumerate() { self.sess.fileline_note(self.span, format!("crate `{}` path #{}: {}", - r.ident, i+1, path.display())[]); + r.ident, i+1, path.display()).index(&FullRange)); } } } @@ -386,7 +386,7 @@ impl<'a> Context<'a> { None => return FileDoesntMatch, Some(file) => file, }; - let (hash, rlib) = if file.starts_with(rlib_prefix[]) && + let (hash, rlib) = if file.starts_with(rlib_prefix.index(&FullRange)) && file.ends_with(".rlib") { (file.slice(rlib_prefix.len(), file.len() - ".rlib".len()), true) @@ -400,7 +400,7 @@ impl<'a> Context<'a> { info!("lib candidate: {}", path.display()); let hash_str = hash.to_string(); - let slot = candidates.entry(&hash_str).get().unwrap_or_else( + let slot = candidates.entry(hash_str).get().unwrap_or_else( |vacant_entry| vacant_entry.insert((HashSet::new(), HashSet::new()))); let (ref mut rlibs, ref mut dylibs) = *slot; if rlib { @@ -446,26 +446,26 @@ impl<'a> Context<'a> { _ => { self.sess.span_err(self.span, format!("multiple matching crates for `{}`", - self.crate_name)[]); + self.crate_name).index(&FullRange)); self.sess.note("candidates:"); for lib in libraries.iter() { match lib.dylib { Some(ref p) => { self.sess.note(format!("path: {}", - p.display())[]); + p.display()).index(&FullRange)); } None => {} } match lib.rlib { Some(ref p) => { self.sess.note(format!("path: {}", - p.display())[]); + p.display()).index(&FullRange)); } None => {} } let data = lib.metadata.as_slice(); let name = decoder::get_crate_name(data); - note_crate_name(self.sess.diagnostic(), name[]); + note_crate_name(self.sess.diagnostic(), name.index(&FullRange)); } None } @@ -519,11 +519,11 @@ impl<'a> Context<'a> { format!("multiple {} candidates for `{}` \ found", flavor, - self.crate_name)[]); + self.crate_name).index(&FullRange)); self.sess.span_note(self.span, format!(r"candidate #1: {}", ret.as_ref().unwrap() - .display())[]); + .display()).index(&FullRange)); error = 1; ret = None; } @@ -531,7 +531,7 @@ impl<'a> Context<'a> { error += 1; self.sess.span_note(self.span, format!(r"candidate #{}: {}", error, - lib.display())[]); + lib.display()).index(&FullRange)); 
continue } *slot = Some(metadata); @@ -606,17 +606,17 @@ impl<'a> Context<'a> { let mut rlibs = HashSet::new(); let mut dylibs = HashSet::new(); { - let mut locs = locs.iter().map(|l| Path::new(l[])).filter(|loc| { + let mut locs = locs.iter().map(|l| Path::new(l.index(&FullRange))).filter(|loc| { if !loc.exists() { sess.err(format!("extern location for {} does not exist: {}", - self.crate_name, loc.display())[]); + self.crate_name, loc.display()).index(&FullRange)); return false; } let file = match loc.filename_str() { Some(file) => file, None => { sess.err(format!("extern location for {} is not a file: {}", - self.crate_name, loc.display())[]); + self.crate_name, loc.display()).index(&FullRange)); return false; } }; @@ -624,12 +624,13 @@ impl<'a> Context<'a> { return true } else { let (ref prefix, ref suffix) = dylibname; - if file.starts_with(prefix[]) && file.ends_with(suffix[]) { + if file.starts_with(prefix.index(&FullRange)) && + file.ends_with(suffix.index(&FullRange)) { return true } } sess.err(format!("extern location for {} is of an unknown type: {}", - self.crate_name, loc.display())[]); + self.crate_name, loc.display()).index(&FullRange)); false }); @@ -662,7 +663,7 @@ impl<'a> Context<'a> { } pub fn note_crate_name(diag: &SpanHandler, name: &str) { - diag.handler().note(format!("crate name: {}", name)[]); + diag.handler().note(format!("crate name: {}", name).index(&FullRange)); } impl ArchiveMetadata { diff --git a/src/librustc/metadata/tydecode.rs b/src/librustc/metadata/tydecode.rs index 07dc13ff0d48f..a4304bf1e2d62 100644 --- a/src/librustc/metadata/tydecode.rs +++ b/src/librustc/metadata/tydecode.rs @@ -98,7 +98,7 @@ fn scan(st: &mut PState, mut is_last: F, op: G) -> R where } let end_pos = st.pos; st.pos += 1; - return op(st.data[start_pos..end_pos]); + return op(st.data.index(&(start_pos..end_pos))); } pub fn parse_ident(st: &mut PState, last: char) -> ast::Ident { @@ -251,7 +251,7 @@ fn parse_trait_store_(st: &mut PState, conv: &mut F) -> ty::TraitStore where '&' => ty::RegionTraitStore(parse_region_(st, conv), parse_mutability(st)), c => { st.tcx.sess.bug(format!("parse_trait_store(): bad input '{}'", - c)[]) + c).index(&FullRange)) } } } @@ -318,7 +318,7 @@ fn parse_bound_region_(st: &mut PState, conv: &mut F) -> ty::BoundRegion wher } '[' => { let def = parse_def_(st, RegionParameter, conv); - let ident = token::str_to_ident(parse_str(st, ']')[]); + let ident = token::str_to_ident(parse_str(st, ']').index(&FullRange)); ty::BrNamed(def, ident.name) } 'f' => { @@ -357,7 +357,7 @@ fn parse_region_(st: &mut PState, conv: &mut F) -> ty::Region where assert_eq!(next(st), '|'); let index = parse_u32(st); assert_eq!(next(st), '|'); - let nm = token::str_to_ident(parse_str(st, ']')[]); + let nm = token::str_to_ident(parse_str(st, ']').index(&FullRange)); ty::ReEarlyBound(node_id, space, index, nm.name) } 'f' => { @@ -443,8 +443,8 @@ fn parse_ty_<'a, 'tcx, F>(st: &mut PState<'a, 'tcx>, conv: &mut F) -> Ty<'tcx> w let tcx = st.tcx; match next(st) { 'b' => return tcx.types.bool, - 'i' => return tcx.types.int, - 'u' => return tcx.types.uint, + 'i' => { /* eat the s of is */ next(st); return tcx.types.int }, + 'u' => { /* eat the s of us */ next(st); return tcx.types.uint }, 'M' => { match next(st) { 'b' => return tcx.types.u8, @@ -481,7 +481,7 @@ fn parse_ty_<'a, 'tcx, F>(st: &mut PState<'a, 'tcx>, conv: &mut F) -> Ty<'tcx> w assert_eq!(next(st), '|'); let space = parse_param_space(st); assert_eq!(next(st), '|'); - let name = token::intern(parse_str(st, ']')[]); + let name = 
token::intern(parse_str(st, ']').index(&FullRange)); return ty::mk_param(tcx, space, index, name); } '~' => return ty::mk_uniq(tcx, parse_ty_(st, conv)), @@ -637,7 +637,7 @@ fn parse_abi_set(st: &mut PState) -> abi::Abi { assert_eq!(next(st), '['); scan(st, |c| c == ']', |bytes| { let abi_str = str::from_utf8(bytes).unwrap(); - abi::lookup(abi_str[]).expect(abi_str) + abi::lookup(abi_str.index(&FullRange)).expect(abi_str) }) } @@ -733,17 +733,17 @@ pub fn parse_def_id(buf: &[u8]) -> ast::DefId { panic!(); } - let crate_part = buf[0u..colon_idx]; - let def_part = buf[colon_idx + 1u..len]; + let crate_part = buf.index(&(0u..colon_idx)); + let def_part = buf.index(&((colon_idx + 1u)..len)); let crate_num = match str::from_utf8(crate_part).ok().and_then(|s| s.parse::()) { Some(cn) => cn as ast::CrateNum, - None => panic!("internal error: parse_def_id: crate number expected, found {}", + None => panic!("internal error: parse_def_id: crate number expected, found {:?}", crate_part) }; let def_num = match str::from_utf8(def_part).ok().and_then(|s| s.parse::()) { Some(dn) => dn as ast::NodeId, - None => panic!("internal error: parse_def_id: id expected, found {}", + None => panic!("internal error: parse_def_id: id expected, found {:?}", def_part) }; ast::DefId { krate: crate_num, node: def_num } diff --git a/src/librustc/metadata/tyencode.rs b/src/librustc/metadata/tyencode.rs index 0042209aced6a..c019d129218b6 100644 --- a/src/librustc/metadata/tyencode.rs +++ b/src/librustc/metadata/tyencode.rs @@ -61,7 +61,7 @@ pub fn enc_ty<'a, 'tcx>(w: &mut SeekableMemWriter, cx: &ctxt<'a, 'tcx>, t: Ty<'t ty::ty_char => mywrite!(w, "c"), ty::ty_int(t) => { match t { - ast::TyI => mywrite!(w, "i"), + ast::TyIs => mywrite!(w, "is"), ast::TyI8 => mywrite!(w, "MB"), ast::TyI16 => mywrite!(w, "MW"), ast::TyI32 => mywrite!(w, "ML"), @@ -70,7 +70,7 @@ pub fn enc_ty<'a, 'tcx>(w: &mut SeekableMemWriter, cx: &ctxt<'a, 'tcx>, t: Ty<'t } ty::ty_uint(t) => { match t { - ast::TyU => mywrite!(w, "u"), + ast::TyUs => mywrite!(w, "us"), ast::TyU8 => mywrite!(w, "Mb"), ast::TyU16 => mywrite!(w, "Mw"), ast::TyU32 => mywrite!(w, "Ml"), diff --git a/src/librustc/middle/astconv_util.rs b/src/librustc/middle/astconv_util.rs index b2ad77467276f..955f522b8041b 100644 --- a/src/librustc/middle/astconv_util.rs +++ b/src/librustc/middle/astconv_util.rs @@ -48,7 +48,7 @@ pub fn ast_ty_to_prim_ty<'tcx>(tcx: &ty::ctxt<'tcx>, ast_ty: &ast::Ty) None => { tcx.sess.span_bug(ast_ty.span, format!("unbound path {}", - path.repr(tcx))[]) + path.repr(tcx)).index(&FullRange)) } Some(&d) => d }; diff --git a/src/librustc/middle/astencode.rs b/src/librustc/middle/astencode.rs index 93a19a01f668f..32ce131c57a3c 100644 --- a/src/librustc/middle/astencode.rs +++ b/src/librustc/middle/astencode.rs @@ -82,7 +82,7 @@ pub fn encode_inlined_item(ecx: &e::EncodeContext, e::IIImplItemRef(_, &ast::MethodImplItem(ref m)) => m.id, e::IIImplItemRef(_, &ast::TypeImplItem(ref ti)) => ti.id, }; - debug!("> Encoding inlined item: {} ({})", + debug!("> Encoding inlined item: {} ({:?})", ecx.tcx.map.path_to_string(id), rbml_w.writer.tell()); @@ -96,7 +96,7 @@ pub fn encode_inlined_item(ecx: &e::EncodeContext, encode_side_tables_for_ii(ecx, rbml_w, &ii); rbml_w.end_tag(); - debug!("< Encoded inlined fn: {} ({})", + debug!("< Encoded inlined fn: {} ({:?})", ecx.tcx.map.path_to_string(id), rbml_w.writer.tell()); } @@ -127,12 +127,12 @@ pub fn decode_inlined_item<'tcx>(cdata: &cstore::crate_metadata, None => Err(path), Some(ast_doc) => { let mut path_as_str = None; - 
debug!("> Decoding inlined fn: {}::?", + debug!("> Decoding inlined fn: {:?}::?", { // Do an Option dance to use the path after it is moved below. let s = ast_map::path_to_string(ast_map::Values(path.iter())); path_as_str = Some(s); - path_as_str.as_ref().map(|x| x[]) + path_as_str.as_ref().map(|x| x.index(&FullRange)) }); let mut ast_dsr = reader::Decoder::new(ast_doc); let from_id_range = Decodable::decode(&mut ast_dsr).unwrap(); @@ -263,14 +263,6 @@ trait def_id_encoder_helpers { fn emit_def_id(&mut self, did: ast::DefId); } -#[cfg(stage0)] -impl, E> def_id_encoder_helpers for S { - fn emit_def_id(&mut self, did: ast::DefId) { - did.encode(self).ok().unwrap() - } -} - -#[cfg(not(stage0))] impl def_id_encoder_helpers for S { fn emit_def_id(&mut self, did: ast::DefId) { did.encode(self).ok().unwrap() @@ -283,21 +275,6 @@ trait def_id_decoder_helpers { cdata: &cstore::crate_metadata) -> ast::DefId; } -#[cfg(stage0)] -impl, E> def_id_decoder_helpers for D { - fn read_def_id(&mut self, dcx: &DecodeContext) -> ast::DefId { - let did: ast::DefId = Decodable::decode(self).ok().unwrap(); - did.tr(dcx) - } - - fn read_def_id_nodcx(&mut self, - cdata: &cstore::crate_metadata) -> ast::DefId { - let did: ast::DefId = Decodable::decode(self).ok().unwrap(); - decoder::translate_def_id(cdata, did) - } -} - -#[cfg(not(stage0))] impl def_id_decoder_helpers for D { fn read_def_id(&mut self, dcx: &DecodeContext) -> ast::DefId { let did: ast::DefId = Decodable::decode(self).ok().unwrap(); @@ -1880,7 +1857,7 @@ impl<'a, 'tcx> rbml_decoder_decoder_helpers<'tcx> for reader::Decoder<'a> { NominalType | TypeWithId | RegionParameter => dcx.tr_def_id(did), TypeParameter | UnboxedClosureSource => dcx.tr_intern_def_id(did) }; - debug!("convert_def_id(source={}, did={})={}", source, did, r); + debug!("convert_def_id(source={:?}, did={:?})={:?}", source, did, r); return r; } } @@ -1900,7 +1877,7 @@ fn decode_side_tables(dcx: &DecodeContext, None => { dcx.tcx.sess.bug( format!("unknown tag found in side tables: {:x}", - tag)[]); + tag).index(&FullRange)); } Some(value) => { let val_doc = entry_doc.get(c::tag_table_val as uint); @@ -1985,7 +1962,7 @@ fn decode_side_tables(dcx: &DecodeContext, _ => { dcx.tcx.sess.bug( format!("unknown tag found in side tables: {:x}", - tag)[]); + tag).index(&FullRange)); } } } diff --git a/src/librustc/middle/cfg/construct.rs b/src/librustc/middle/cfg/construct.rs index 3c672d0fdb6fa..f7fc90bcef651 100644 --- a/src/librustc/middle/cfg/construct.rs +++ b/src/librustc/middle/cfg/construct.rs @@ -362,7 +362,7 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { let mut cond_exit = discr_exit; for arm in arms.iter() { cond_exit = self.add_dummy_node(&[cond_exit]); // 2 - let pats_exit = self.pats_any(arm.pats[], + let pats_exit = self.pats_any(arm.pats.index(&FullRange), cond_exit); // 3 let guard_exit = self.opt_expr(&arm.guard, pats_exit); // 4 @@ -480,12 +480,12 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { let inputs = inline_asm.inputs.iter(); let outputs = inline_asm.outputs.iter(); let post_inputs = self.exprs(inputs.map(|a| { - debug!("cfg::construct InlineAsm id:{} input:{}", expr.id, a); + debug!("cfg::construct InlineAsm id:{} input:{:?}", expr.id, a); let &(_, ref expr) = a; &**expr }), pred); let post_outputs = self.exprs(outputs.map(|a| { - debug!("cfg::construct InlineAsm id:{} output:{}", expr.id, a); + debug!("cfg::construct InlineAsm id:{} output:{:?}", expr.id, a); let &(_, ref expr, _) = a; &**expr }), post_inputs); @@ -514,7 +514,7 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { let 
func_or_rcvr_exit = self.expr(func_or_rcvr, pred); let ret = self.straightline(call_expr, func_or_rcvr_exit, args); - if return_ty == ty::FnDiverging { + if return_ty.diverges() { self.add_node(ast::DUMMY_NODE_ID, &[]) } else { ret @@ -616,14 +616,14 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { self.tcx.sess.span_bug( expr.span, format!("no loop scope for id {}", - loop_id)[]); + loop_id).index(&FullRange)); } r => { self.tcx.sess.span_bug( expr.span, - format!("bad entry `{}` in def_map for label", - r)[]); + format!("bad entry `{:?}` in def_map for label", + r).index(&FullRange)); } } } diff --git a/src/librustc/middle/cfg/graphviz.rs b/src/librustc/middle/cfg/graphviz.rs index 8a2ecbca20d55..8b9a0d89b380c 100644 --- a/src/librustc/middle/cfg/graphviz.rs +++ b/src/librustc/middle/cfg/graphviz.rs @@ -52,7 +52,7 @@ fn replace_newline_with_backslash_l(s: String) -> String { } impl<'a, 'ast> dot::Labeller<'a, Node<'a>, Edge<'a>> for LabelledCFG<'a, 'ast> { - fn graph_id(&'a self) -> dot::Id<'a> { dot::Id::new(self.name[]).unwrap() } + fn graph_id(&'a self) -> dot::Id<'a> { dot::Id::new(self.name.index(&FullRange)).unwrap() } fn node_id(&'a self, &(i,_): &Node<'a>) -> dot::Id<'a> { dot::Id::new(format!("N{}", i.node_id())).unwrap() @@ -85,7 +85,9 @@ impl<'a, 'ast> dot::Labeller<'a, Node<'a>, Edge<'a>> for LabelledCFG<'a, 'ast> { let s = self.ast_map.node_to_string(node_id); // left-aligns the lines let s = replace_newline_with_backslash_l(s); - label.push_str(format!("exiting scope_{} {}", i, s[])[]); + label.push_str(format!("exiting scope_{} {}", + i, + s.index(&FullRange)).index(&FullRange)); } dot::LabelText::EscStr(label.into_cow()) } diff --git a/src/librustc/middle/check_const.rs b/src/librustc/middle/check_const.rs index ac53bdbefcf10..621d7274b3f7c 100644 --- a/src/librustc/middle/check_const.rs +++ b/src/librustc/middle/check_const.rs @@ -118,7 +118,7 @@ fn check_expr(v: &mut CheckCrateVisitor, e: &ast::Expr) { DefStruct(_) | DefVariant(_, _, _) => {} def => { - debug!("(checking const) found bad def: {}", def); + debug!("(checking const) found bad def: {:?}", def); span_err!(v.tcx.sess, e.span, E0014, "paths in constants may only refer to constants \ or functions"); diff --git a/src/librustc/middle/check_loop.rs b/src/librustc/middle/check_loop.rs index e68a9fb50efd0..5024e5c4f77de 100644 --- a/src/librustc/middle/check_loop.rs +++ b/src/librustc/middle/check_loop.rs @@ -74,11 +74,11 @@ impl<'a> CheckLoopVisitor<'a> { Loop => {} Closure => { self.sess.span_err(span, - format!("`{}` inside of a closure", name)[]); + format!("`{}` inside of a closure", name).index(&FullRange)); } Normal => { self.sess.span_err(span, - format!("`{}` outside of loop", name)[]); + format!("`{}` outside of loop", name).index(&FullRange)); } } } diff --git a/src/librustc/middle/check_match.rs b/src/librustc/middle/check_match.rs index f2b9ecb5ec432..f1edfb37273bb 100644 --- a/src/librustc/middle/check_match.rs +++ b/src/librustc/middle/check_match.rs @@ -47,7 +47,7 @@ struct Matrix<'a>(Vec>); /// Pretty-printer for matrices of patterns, example: /// ++++++++++++++++++++++++++ -/// + _ + [] + +/// + _ + .index(&FullRange) + /// ++++++++++++++++++++++++++ /// + true + [First] + /// ++++++++++++++++++++++++++ @@ -161,7 +161,7 @@ fn check_expr(cx: &mut MatchCheckCtxt, ex: &ast::Expr) { // First, check legality of move bindings. 
check_legality_of_move_bindings(cx, arm.guard.is_some(), - arm.pats[]); + arm.pats.index(&FullRange)); // Second, if there is a guard on each arm, make sure it isn't // assigning or borrowing anything mutably. @@ -198,7 +198,7 @@ fn check_expr(cx: &mut MatchCheckCtxt, ex: &ast::Expr) { } // Fourth, check for unreachable arms. - check_arms(cx, inlined_arms[], source); + check_arms(cx, inlined_arms.index(&FullRange), source); // Finally, check if the whole match expression is exhaustive. // Check for empty enum, because is_useful only works on inhabited types. @@ -230,7 +230,7 @@ fn check_expr(cx: &mut MatchCheckCtxt, ex: &ast::Expr) { pat.span, format!("refutable pattern in `for` loop binding: \ `{}` not covered", - pat_to_string(uncovered_pat))[]); + pat_to_string(uncovered_pat)).index(&FullRange)); }); // Check legality of move bindings. @@ -303,7 +303,7 @@ fn check_arms(cx: &MatchCheckCtxt, for pat in pats.iter() { let v = vec![&**pat]; - match is_useful(cx, &seen, v[], LeaveOutWitness) { + match is_useful(cx, &seen, v.index(&FullRange), LeaveOutWitness) { NotUseful => { match source { ast::MatchSource::IfLetDesugar { .. } => { @@ -355,7 +355,7 @@ fn raw_pat<'a>(p: &'a Pat) -> &'a Pat { fn check_exhaustive(cx: &MatchCheckCtxt, sp: Span, matrix: &Matrix) { match is_useful(cx, matrix, &[DUMMY_WILD_PAT], ConstructWitness) { UsefulWithWitness(pats) => { - let witness = match pats[] { + let witness = match pats.index(&FullRange) { [ref witness] => &**witness, [] => DUMMY_WILD_PAT, _ => unreachable!() @@ -574,7 +574,7 @@ fn is_useful(cx: &MatchCheckCtxt, witness: WitnessPreference) -> Usefulness { let &Matrix(ref rows) = matrix; - debug!("{:}", matrix); + debug!("{:?}", matrix); if rows.len() == 0u { return match witness { ConstructWitness => UsefulWithWitness(vec!()), @@ -609,7 +609,7 @@ fn is_useful(cx: &MatchCheckCtxt, UsefulWithWitness(pats) => UsefulWithWitness({ let arity = constructor_arity(cx, &c, left_ty); let mut result = { - let pat_slice = pats[]; + let pat_slice = pats.index(&FullRange); let subpats: Vec<_> = range(0, arity).map(|i| { pat_slice.get(i).map_or(DUMMY_WILD_PAT, |p| &**p) }).collect(); @@ -656,10 +656,10 @@ fn is_useful_specialized(cx: &MatchCheckCtxt, &Matrix(ref m): &Matrix, witness: WitnessPreference) -> Usefulness { let arity = constructor_arity(cx, &ctor, lty); let matrix = Matrix(m.iter().filter_map(|r| { - specialize(cx, r[], &ctor, 0u, arity) + specialize(cx, r.index(&FullRange), &ctor, 0u, arity) }).collect()); match specialize(cx, v, &ctor, 0u, arity) { - Some(v) => is_useful(cx, &matrix, v[], witness), + Some(v) => is_useful(cx, &matrix, v.index(&FullRange), witness), None => NotUseful } } @@ -729,7 +729,7 @@ fn pat_constructors(cx: &MatchCheckCtxt, p: &Pat, /// This computes the arity of a constructor. The arity of a constructor /// is how many subpattern patterns of that constructor should be expanded to. /// -/// For instance, a tuple pattern (_, 42u, Some([])) has the arity of 3. +/// For instance, a tuple pattern (_, 42u, Some(.index(&FullRange))) has the arity of 3. /// A struct pattern's arity is the number of fields it contains, etc. 
pub fn constructor_arity(cx: &MatchCheckCtxt, ctor: &Constructor, ty: Ty) -> uint { match ty.sty { @@ -926,8 +926,8 @@ pub fn specialize<'a>(cx: &MatchCheckCtxt, r: &[&'a Pat], } }; head.map(|mut head| { - head.push_all(r[..col]); - head.push_all(r[col + 1..]); + head.push_all(r.index(&(0..col))); + head.push_all(r.index(&((col + 1)..))); head }) } @@ -1042,9 +1042,9 @@ fn check_legality_of_move_bindings(cx: &MatchCheckCtxt, cx.tcx.sess.span_bug( p.span, format!("binding pattern {} is not an \ - identifier: {}", + identifier: {:?}", p.id, - p.node)[]); + p.node).index(&FullRange)); } } } diff --git a/src/librustc/middle/check_rvalues.rs b/src/librustc/middle/check_rvalues.rs index c383b1579ef84..5ff1f36f0e06a 100644 --- a/src/librustc/middle/check_rvalues.rs +++ b/src/librustc/middle/check_rvalues.rs @@ -59,7 +59,7 @@ impl<'a, 'tcx> euv::Delegate<'tcx> for RvalueContextDelegate<'a, 'tcx> { span: Span, cmt: mc::cmt<'tcx>, _: euv::ConsumeMode) { - debug!("consume; cmt: {}; type: {}", *cmt, ty_to_string(self.tcx, cmt.ty)); + debug!("consume; cmt: {:?}; type: {}", *cmt, ty_to_string(self.tcx, cmt.ty)); if !ty::type_is_sized(self.param_env, span, cmt.ty) { span_err!(self.tcx.sess, span, E0161, "cannot move a value of type {0}: the size of {0} cannot be statically determined", diff --git a/src/librustc/middle/check_static.rs b/src/librustc/middle/check_static.rs index df51cb7e6bc4b..994a2b0dc8abc 100644 --- a/src/librustc/middle/check_static.rs +++ b/src/librustc/middle/check_static.rs @@ -112,7 +112,7 @@ impl<'a, 'tcx> CheckStaticVisitor<'a, 'tcx> { }; self.tcx.sess.span_err(e.span, format!("mutable statics are not allowed \ - to have {}", suffix)[]); + to have {}", suffix).index(&FullRange)); } fn check_static_type(&self, e: &ast::Expr) { @@ -170,7 +170,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for CheckStaticVisitor<'a, 'tcx> { ty::ty_enum(did, _) if ty::has_dtor(self.tcx, did) => { self.tcx.sess.span_err(e.span, format!("{} are not allowed to have \ - destructors", self.msg())[]) + destructors", self.msg()).index(&FullRange)) } _ => {} } @@ -234,7 +234,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for CheckStaticVisitor<'a, 'tcx> { let msg = "constants cannot refer to other statics, \ insert an intermediate constant \ instead"; - self.tcx.sess.span_err(e.span, msg[]); + self.tcx.sess.span_err(e.span, msg.index(&FullRange)); } _ => {} } diff --git a/src/librustc/middle/check_static_recursion.rs b/src/librustc/middle/check_static_recursion.rs index c36b4aa7f231e..75851f0a85333 100644 --- a/src/librustc/middle/check_static_recursion.rs +++ b/src/librustc/middle/check_static_recursion.rs @@ -105,7 +105,7 @@ impl<'a, 'ast, 'v> Visitor<'v> for CheckItemRecursionVisitor<'a, 'ast> { _ => { self.sess.span_err(e.span, format!("expected item, found {}", - self.ast_map.node_to_string(def_id.node))[]); + self.ast_map.node_to_string(def_id.node)).index(&FullRange)); return; }, } diff --git a/src/librustc/middle/const_eval.rs b/src/librustc/middle/const_eval.rs index 32482fce4daa8..e726993bd4840 100644 --- a/src/librustc/middle/const_eval.rs +++ b/src/librustc/middle/const_eval.rs @@ -48,7 +48,7 @@ use std::rc::Rc; // target uses". This _includes_ integer-constants, plus the following // constructors: // -// fixed-size vectors and strings: [] and ""/_ +// fixed-size vectors and strings: .index(&FullRange) and ""/_ // vector and string slices: &[] and &"" // tuples: (,) // enums: foo(...) 
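Note on the recurring change in these hunks: the old bracket sugar `expr[]` and `expr[a..b]` for taking a full or partial slice is being desugared into explicit calls to the indexing method, `expr.index(&FullRange)` and `expr.index(&(a..b))`, which is why even doc comments and prose comments above were caught by the mechanical rewrite. A minimal sketch of the same idea in present-day Rust, assuming today's names (the full range is written `..`, i.e. `RangeFull`, and `Index::index` takes the range by value rather than by reference):

use std::ops::Index;

fn main() {
    let v = vec![1u32, 2, 3, 4];

    // what `v[]` desugared to: index the whole vector, yielding a slice
    let full: &[u32] = Index::index(&v, ..);

    // what `v[1..3]` desugared to: index a sub-range
    let part: &[u32] = Index::index(&v, 1..3);

    assert_eq!(full, &[1, 2, 3, 4]);
    assert_eq!(part, &[2, 3]);
    println!("full = {:?}, part = {:?}", full, part);
}

The parallel `{}` to `{:?}` edits in the `debug!`, `format!`, and `panic!` calls throughout these files go with the same migration's split of formatting into separate user-facing and debug traits: values such as defs, region variables, and paths typically only implement the debug formatter, so they must now be printed with `{:?}`.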
@@ -117,7 +117,7 @@ fn lookup_variant_by_id<'a>(tcx: &'a ty::ctxt, None => None, Some(ast_map::NodeItem(it)) => match it.node { ast::ItemEnum(ast::EnumDef { ref variants }, _) => { - variant_expr(variants[], variant_def.node) + variant_expr(variants.index(&FullRange), variant_def.node) } _ => None }, @@ -138,7 +138,7 @@ fn lookup_variant_by_id<'a>(tcx: &'a ty::ctxt, // NOTE this doesn't do the right thing, it compares inlined // NodeId's to the original variant_def's NodeId, but they // come from different crates, so they will likely never match. - variant_expr(variants[], variant_def.node).map(|e| e.id) + variant_expr(variants.index(&FullRange), variant_def.node).map(|e| e.id) } _ => None }, @@ -311,7 +311,7 @@ pub fn const_expr_to_pat(tcx: &ty::ctxt, expr: &Expr) -> P { ast::ExprCall(ref callee, ref args) => { let def = tcx.def_map.borrow()[callee.id].clone(); - if let Vacant(entry) = tcx.def_map.borrow_mut().entry(&expr.id) { + if let Vacant(entry) = tcx.def_map.borrow_mut().entry(expr.id) { entry.insert(def); } let path = match def { @@ -364,7 +364,7 @@ pub fn const_expr_to_pat(tcx: &ty::ctxt, expr: &Expr) -> P { pub fn eval_const_expr(tcx: &ty::ctxt, e: &Expr) -> const_val { match eval_const_expr_partial(tcx, e) { Ok(r) => r, - Err(s) => tcx.sess.span_fatal(e.span, s[]) + Err(s) => tcx.sess.span_fatal(e.span, s.index(&FullRange)) } } @@ -528,12 +528,12 @@ pub fn eval_const_expr_partial(tcx: &ty::ctxt, e: &Expr) -> Result (int, const_int, i64), + ty::ty_int(ast::TyIs) => (int, const_int, i64), ty::ty_int(ast::TyI8) => (i8, const_int, i64), ty::ty_int(ast::TyI16) => (i16, const_int, i64), ty::ty_int(ast::TyI32) => (i32, const_int, i64), ty::ty_int(ast::TyI64) => (i64, const_int, i64), - ty::ty_uint(ast::TyU) => (uint, const_uint, u64), + ty::ty_uint(ast::TyUs) => (uint, const_uint, u64), ty::ty_uint(ast::TyU8) => (u8, const_uint, u64), ty::ty_uint(ast::TyU16) => (u16, const_uint, u64), ty::ty_uint(ast::TyU32) => (u32, const_uint, u64), diff --git a/src/librustc/middle/dataflow.rs b/src/librustc/middle/dataflow.rs index e78b8047f6958..bdd98a94fc32f 100644 --- a/src/librustc/middle/dataflow.rs +++ b/src/librustc/middle/dataflow.rs @@ -196,7 +196,7 @@ impl<'a, 'tcx, O:DataFlowOperator> DataFlowContext<'a, 'tcx, O> { let words_per_id = (bits_per_id + uint::BITS - 1) / uint::BITS; let num_nodes = cfg.graph.all_nodes().len(); - debug!("DataFlowContext::new(analysis_name: {}, id_range={}, \ + debug!("DataFlowContext::new(analysis_name: {}, id_range={:?}, \ bits_per_id={}, words_per_id={}) \ num_nodes: {}", analysis_name, id_range, bits_per_id, words_per_id, @@ -251,7 +251,7 @@ impl<'a, 'tcx, O:DataFlowOperator> DataFlowContext<'a, 'tcx, O> { fn apply_gen_kill(&self, cfgidx: CFGIndex, bits: &mut [uint]) { //! 
Applies the gen and kill sets for `cfgidx` to `bits` - debug!("{} apply_gen_kill(cfgidx={}, bits={}) [before]", + debug!("{} apply_gen_kill(cfgidx={:?}, bits={}) [before]", self.analysis_name, cfgidx, mut_bits_to_string(bits)); assert!(self.bits_per_id > 0); @@ -261,7 +261,7 @@ impl<'a, 'tcx, O:DataFlowOperator> DataFlowContext<'a, 'tcx, O> { let kills = self.kills.slice(start, end); bitwise(bits, kills, &Subtract); - debug!("{} apply_gen_kill(cfgidx={}, bits={}) [after]", + debug!("{} apply_gen_kill(cfgidx={:?}, bits={}) [after]", self.analysis_name, cfgidx, mut_bits_to_string(bits)); } @@ -312,10 +312,10 @@ impl<'a, 'tcx, O:DataFlowOperator> DataFlowContext<'a, 'tcx, O> { let mut t = on_entry.to_vec(); self.apply_gen_kill(cfgidx, t.as_mut_slice()); temp_bits = t; - temp_bits[] + temp_bits.index(&FullRange) } }; - debug!("{} each_bit_for_node({}, cfgidx={}) bits={}", + debug!("{} each_bit_for_node({:?}, cfgidx={:?}) bits={}", self.analysis_name, e, cfgidx, bits_to_string(slice)); self.each_bit(slice, f) } @@ -410,7 +410,7 @@ impl<'a, 'tcx, O:DataFlowOperator> DataFlowContext<'a, 'tcx, O> { } } None => { - debug!("{} add_kills_from_flow_exits flow_exit={} \ + debug!("{} add_kills_from_flow_exits flow_exit={:?} \ no cfg_idx for exiting_scope={}", self.analysis_name, flow_exit, node_id); } @@ -419,10 +419,10 @@ impl<'a, 'tcx, O:DataFlowOperator> DataFlowContext<'a, 'tcx, O> { if changed { let bits = self.kills.slice_mut(start, end); - debug!("{} add_kills_from_flow_exits flow_exit={} bits={} [before]", + debug!("{} add_kills_from_flow_exits flow_exit={:?} bits={} [before]", self.analysis_name, flow_exit, mut_bits_to_string(bits)); - bits.clone_from_slice(orig_kills[]); - debug!("{} add_kills_from_flow_exits flow_exit={} bits={} [after]", + bits.clone_from_slice(orig_kills.index(&FullRange)); + debug!("{} add_kills_from_flow_exits flow_exit={:?} bits={} [after]", self.analysis_name, flow_exit, mut_bits_to_string(bits)); } true @@ -481,7 +481,7 @@ impl<'a, 'b, 'tcx, O:DataFlowOperator> PropagationContext<'a, 'b, 'tcx, O> { assert!(self.dfcx.bits_per_id > 0); cfg.graph.each_node(|node_index, node| { - debug!("DataFlowContext::walk_cfg idx={} id={} begin in_out={}", + debug!("DataFlowContext::walk_cfg idx={:?} id={} begin in_out={}", node_index, node.data.id, bits_to_string(in_out)); let (start, end) = self.dfcx.compute_id_range(node_index); @@ -521,7 +521,7 @@ impl<'a, 'b, 'tcx, O:DataFlowOperator> PropagationContext<'a, 'b, 'tcx, O> { edge: &cfg::CFGEdge) { let source = edge.source(); let cfgidx = edge.target(); - debug!("{} propagate_bits_into_entry_set_for(pred_bits={}, {} to {})", + debug!("{} propagate_bits_into_entry_set_for(pred_bits={}, {:?} to {:?})", self.dfcx.analysis_name, bits_to_string(pred_bits), source, cfgidx); assert!(self.dfcx.bits_per_id > 0); @@ -532,7 +532,7 @@ impl<'a, 'b, 'tcx, O:DataFlowOperator> PropagationContext<'a, 'b, 'tcx, O> { bitwise(on_entry, pred_bits, &self.dfcx.oper) }; if changed { - debug!("{} changed entry set for {} to {}", + debug!("{} changed entry set for {:?} to {}", self.dfcx.analysis_name, cfgidx, bits_to_string(self.dfcx.on_entry.slice(start, end))); self.changed = true; @@ -554,7 +554,7 @@ fn bits_to_string(words: &[uint]) -> String { let mut v = word; for _ in range(0u, uint::BYTES) { result.push(sep); - result.push_str(format!("{:02x}", v & 0xFF)[]); + result.push_str(format!("{:02x}", v & 0xFF).index(&FullRange)); v >>= 8; sep = '-'; } diff --git a/src/librustc/middle/dependency_format.rs b/src/librustc/middle/dependency_format.rs index 
6b56ece28bdb2..0bc899a8a6293 100644 --- a/src/librustc/middle/dependency_format.rs +++ b/src/librustc/middle/dependency_format.rs @@ -118,7 +118,7 @@ fn calculate_type(sess: &session::Session, let src = sess.cstore.get_used_crate_source(cnum).unwrap(); if src.rlib.is_some() { return } sess.err(format!("dependency `{}` not found in rlib format", - data.name)[]); + data.name).index(&FullRange)); }); return Vec::new(); } @@ -149,7 +149,7 @@ fn calculate_type(sess: &session::Session, add_library(sess, cnum, cstore::RequireDynamic, &mut formats); let deps = csearch::get_dylib_dependency_formats(&sess.cstore, cnum); for &(depnum, style) in deps.iter() { - debug!("adding {}: {}", style, + debug!("adding {:?}: {}", style, sess.cstore.get_crate_data(depnum).name.clone()); add_library(sess, depnum, style, &mut formats); } @@ -197,7 +197,7 @@ fn calculate_type(sess: &session::Session, match kind { cstore::RequireStatic => "rlib", cstore::RequireDynamic => "dylib", - })[]); + }).index(&FullRange)); } } } @@ -222,7 +222,7 @@ fn add_library(sess: &session::Session, let data = sess.cstore.get_crate_data(cnum); sess.err(format!("cannot satisfy dependencies so `{}` only \ shows up once", - data.name)[]); + data.name).index(&FullRange)); sess.help("having upstream crates all available in one format \ will likely make this go away"); } diff --git a/src/librustc/middle/expr_use_visitor.rs b/src/librustc/middle/expr_use_visitor.rs index df2a4e4c2532a..45838436e6097 100644 --- a/src/librustc/middle/expr_use_visitor.rs +++ b/src/librustc/middle/expr_use_visitor.rs @@ -29,7 +29,7 @@ use middle::ty::{MethodOrigin, MethodParam, MethodTypeParam}; use middle::ty::{MethodStatic, MethodStaticUnboxedClosure}; use util::ppaux::Repr; -use std::kinds; +use std::marker; use syntax::{ast, ast_util}; use syntax::ptr::P; use syntax::codemap::Span; @@ -135,7 +135,7 @@ enum TrackMatchMode { Conflicting, } -impl kinds::Copy for TrackMatchMode {} +impl marker::Copy for TrackMatchMode {} impl TrackMatchMode { // Builds up the whole match mode for a pattern from its constituent @@ -441,28 +441,12 @@ impl<'d,'t,'tcx,TYPER:mc::Typer<'tcx>> ExprUseVisitor<'d,'t,'tcx,TYPER> { } ast::ExprIndex(ref lhs, ref rhs) => { // lhs[rhs] - match rhs.node { - ast::ExprRange(ref start, ref end) => { - // Hacked slicing syntax (KILLME). 
- let args = match (start, end) { - (&Some(ref e1), &Some(ref e2)) => vec![&**e1, &**e2], - (&Some(ref e), &None) => vec![&**e], - (&None, &Some(ref e)) => vec![&**e], - (&None, &None) => Vec::new() - }; - let overloaded = - self.walk_overloaded_operator(expr, &**lhs, args, PassArgs::ByRef); - assert!(overloaded); - } - _ => { - if !self.walk_overloaded_operator(expr, - &**lhs, - vec![&**rhs], - PassArgs::ByRef) { - self.select_from_expr(&**lhs); - self.consume_expr(&**rhs); - } - } + if !self.walk_overloaded_operator(expr, + &**lhs, + vec![&**rhs], + PassArgs::ByRef) { + self.select_from_expr(&**lhs); + self.consume_expr(&**rhs); } } @@ -864,12 +848,17 @@ impl<'d,'t,'tcx,TYPER:mc::Typer<'tcx>> ExprUseVisitor<'d,'t,'tcx,TYPER> { None => {} Some(method_ty) => { let cmt = return_if_err!(self.mc.cat_expr_autoderefd(expr, i)); - let self_ty = ty::ty_fn_args(method_ty)[0]; + + // the method call infrastructure should have + // replaced all late-bound regions with variables: + let self_ty = ty::ty_fn_sig(method_ty).input(0); + let self_ty = ty::assert_no_late_bound_regions(self.tcx(), &self_ty); + let (m, r) = match self_ty.sty { ty::ty_rptr(r, ref m) => (m.mutbl, r), _ => self.tcx().sess.span_bug(expr.span, format!("bad overloaded deref type {}", - method_ty.repr(self.tcx()))[]) + method_ty.repr(self.tcx())).index(&FullRange)) }; let bk = ty::BorrowKind::from_mutbl(m); self.delegate.borrow(expr.id, expr.span, cmt, @@ -1035,7 +1024,7 @@ impl<'d,'t,'tcx,TYPER:mc::Typer<'tcx>> ExprUseVisitor<'d,'t,'tcx,TYPER> { if pat_util::pat_is_binding(def_map, pat) { let tcx = typer.tcx(); - debug!("binding cmt_pat={} pat={} match_mode={}", + debug!("binding cmt_pat={} pat={} match_mode={:?}", cmt_pat.repr(tcx), pat.repr(tcx), match_mode); @@ -1171,10 +1160,10 @@ impl<'d,'t,'tcx,TYPER:mc::Typer<'tcx>> ExprUseVisitor<'d,'t,'tcx,TYPER> { // pattern. if !tcx.sess.has_errors() { - let msg = format!("Pattern has unexpected type: {} and type {}", + let msg = format!("Pattern has unexpected type: {:?} and type {}", def, cmt_pat.ty.repr(tcx)); - tcx.sess.span_bug(pat.span, msg[]) + tcx.sess.span_bug(pat.span, msg.as_slice()) } } @@ -1188,10 +1177,10 @@ impl<'d,'t,'tcx,TYPER:mc::Typer<'tcx>> ExprUseVisitor<'d,'t,'tcx,TYPER> { // reported. 
if !tcx.sess.has_errors() { - let msg = format!("Pattern has unexpected def: {} and type {}", + let msg = format!("Pattern has unexpected def: {:?} and type {}", def, cmt_pat.ty.repr(tcx)); - tcx.sess.span_bug(pat.span, msg[]) + tcx.sess.span_bug(pat.span, msg.index(&FullRange)) } } } diff --git a/src/librustc/middle/graph.rs b/src/librustc/middle/graph.rs index 52b6af7608170..30e0ce33018d0 100644 --- a/src/librustc/middle/graph.rs +++ b/src/librustc/middle/graph.rs @@ -55,7 +55,7 @@ pub struct Edge { impl Show for Edge { fn fmt(&self, f: &mut Formatter) -> Result<(), Error> { - write!(f, "Edge {{ next_edge: [{}, {}], source: {}, target: {}, data: {} }}", + write!(f, "Edge {{ next_edge: [{:?}, {:?}], source: {:?}, target: {:?}, data: {:?} }}", self.next_edge[0], self.next_edge[1], self.source, self.target, self.data) } @@ -419,7 +419,7 @@ mod test { graph.each_incoming_edge(start_index, |edge_index, edge| { assert!(graph.edge_data(edge_index) == &edge.data); assert!(counter < expected_incoming.len()); - debug!("counter={} expected={} edge_index={} edge={}", + debug!("counter={:?} expected={:?} edge_index={:?} edge={:?}", counter, expected_incoming[counter], edge_index, edge); match expected_incoming[counter] { (ref e, ref n) => { @@ -437,7 +437,7 @@ mod test { graph.each_outgoing_edge(start_index, |edge_index, edge| { assert!(graph.edge_data(edge_index) == &edge.data); assert!(counter < expected_outgoing.len()); - debug!("counter={} expected={} edge_index={} edge={}", + debug!("counter={:?} expected={:?} edge_index={:?} edge={:?}", counter, expected_outgoing[counter], edge_index, edge); match expected_outgoing[counter] { (ref e, ref n) => { diff --git a/src/librustc/middle/infer/coercion.rs b/src/librustc/middle/infer/coercion.rs index 65de3a083d2d9..9f87e73d4af9d 100644 --- a/src/librustc/middle/infer/coercion.rs +++ b/src/librustc/middle/infer/coercion.rs @@ -265,7 +265,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { ty::mt{ty: ty, mutbl: mt_b.mutbl}); try!(self.get_ref().infcx.try(|_| sub.tys(ty, b))); debug!("Success, coerced with AutoDerefRef(1, \ - AutoPtr(AutoUnsize({})))", kind); + AutoPtr(AutoUnsize({:?})))", kind); Ok(Some(AdjustDerefRef(AutoDerefRef { autoderefs: 1, autoref: Some(ty::AutoPtr(r_borrow, mt_b.mutbl, @@ -288,7 +288,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { ty::mt{ty: ty, mutbl: mt_b.mutbl}); try!(self.get_ref().infcx.try(|_| sub.tys(ty, b))); debug!("Success, coerced with AutoDerefRef(1, \ - AutoPtr(AutoUnsize({})))", kind); + AutoPtr(AutoUnsize({:?})))", kind); Ok(Some(AdjustDerefRef(AutoDerefRef { autoderefs: 1, autoref: Some(ty::AutoUnsafe(mt_b.mutbl, @@ -306,7 +306,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { let ty = ty::mk_uniq(self.tcx(), ty); try!(self.get_ref().infcx.try(|_| sub.tys(ty, b))); debug!("Success, coerced with AutoDerefRef(1, \ - AutoUnsizeUniq({}))", kind); + AutoUnsizeUniq({:?}))", kind); Ok(Some(AdjustDerefRef(AutoDerefRef { autoderefs: 1, autoref: Some(ty::AutoUnsizeUniq(kind)) @@ -328,7 +328,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { a: Ty<'tcx>, ty_b: Ty<'tcx>) -> Option<(Ty<'tcx>, ty::UnsizeKind<'tcx>)> { - debug!("unsize_ty(a={}, ty_b={})", a, ty_b.repr(self.tcx())); + debug!("unsize_ty(a={:?}, ty_b={})", a, ty_b.repr(self.tcx())); let tcx = self.tcx(); @@ -406,7 +406,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { { let tcx = self.tcx(); - debug!("coerce_borrowed_object(a={}, b={}, b_mutbl={})", + debug!("coerce_borrowed_object(a={}, b={}, b_mutbl={:?})", a.repr(tcx), b.repr(tcx), b_mutbl); @@ -426,7 +426,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { { let tcx = self.tcx(); 
- debug!("coerce_unsafe_object(a={}, b={}, b_mutbl={})", + debug!("coerce_unsafe_object(a={}, b={}, b_mutbl={:?})", a.repr(tcx), b.repr(tcx), b_mutbl); @@ -449,7 +449,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { match a.sty { ty::ty_rptr(_, ty::mt{ty, mutbl}) => match ty.sty { ty::ty_trait(box ty::TyTrait { ref principal, ref bounds }) => { - debug!("mutbl={} b_mutbl={}", mutbl, b_mutbl); + debug!("mutbl={:?} b_mutbl={:?}", mutbl, b_mutbl); let tr = ty::mk_trait(tcx, principal.clone(), bounds.clone()); try!(self.subtype(mk_ty(tr), b)); Ok(Some(AdjustDerefRef(AutoDerefRef { diff --git a/src/librustc/middle/infer/combine.rs b/src/librustc/middle/infer/combine.rs index dd711fcbf022e..22975f54a9fb4 100644 --- a/src/librustc/middle/infer/combine.rs +++ b/src/librustc/middle/infer/combine.rs @@ -142,7 +142,7 @@ pub trait Combine<'tcx> : Sized { for _ in a_regions.iter() { invariance.push(ty::Invariant); } - invariance[] + invariance.index(&FullRange) } }; @@ -361,7 +361,7 @@ pub trait Combine<'tcx> : Sized { a: ty::TraitStore, b: ty::TraitStore) -> cres<'tcx, ty::TraitStore> { - debug!("{}.trait_stores(a={}, b={})", self.tag(), a, b); + debug!("{}.trait_stores(a={:?}, b={:?})", self.tag(), a, b); match (a, b) { (ty::RegionTraitStore(a_r, a_m), @@ -471,7 +471,7 @@ pub fn super_tys<'tcx, C: Combine<'tcx>>(this: &C, let tcx = this.infcx().tcx; let a_sty = &a.sty; let b_sty = &b.sty; - debug!("super_tys: a_sty={} b_sty={}", a_sty, b_sty); + debug!("super_tys: a_sty={:?} b_sty={:?}", a_sty, b_sty); return match (a_sty, b_sty) { // The "subtype" ought to be handling cases involving var: (&ty::ty_infer(TyVar(_)), _) | @@ -480,7 +480,7 @@ pub fn super_tys<'tcx, C: Combine<'tcx>>(this: &C, format!("{}: bot and var types should have been handled ({},{})", this.tag(), a.repr(this.infcx().tcx), - b.repr(this.infcx().tcx))[]); + b.repr(this.infcx().tcx)).index(&FullRange)); } (&ty::ty_err, _) | (_, &ty::ty_err) => { @@ -550,7 +550,7 @@ pub fn super_tys<'tcx, C: Combine<'tcx>>(this: &C, (&ty::ty_trait(ref a_), &ty::ty_trait(ref b_)) => { - debug!("Trying to match traits {} and {}", a, b); + debug!("Trying to match traits {:?} and {:?}", a, b); let principal = try!(this.binders(&a_.principal, &b_.principal)); let bounds = try!(this.existential_bounds(&a_.bounds, &b_.bounds)); Ok(ty::mk_trait(tcx, principal, bounds)) @@ -724,7 +724,7 @@ impl<'f, 'tcx> CombineFields<'f, 'tcx> { Some(e) => e, }; - debug!("instantiate(a_ty={} dir={} b_vid={})", + debug!("instantiate(a_ty={} dir={:?} b_vid={})", a_ty.repr(tcx), dir, b_vid.repr(tcx)); @@ -745,7 +745,7 @@ impl<'f, 'tcx> CombineFields<'f, 'tcx> { self.generalize(a_ty, b_vid, true) } }); - debug!("instantiate(a_ty={}, dir={}, \ + debug!("instantiate(a_ty={}, dir={:?}, \ b_vid={}, generalized_ty={})", a_ty.repr(tcx), dir, b_vid.repr(tcx), generalized_ty.repr(tcx)); @@ -856,7 +856,7 @@ impl<'cx, 'tcx> ty_fold::TypeFolder<'tcx> for Generalizer<'cx, 'tcx> { self.tcx().sess.span_bug( self.span, format!("Encountered early bound region when generalizing: {}", - r.repr(self.tcx()))[]); + r.repr(self.tcx())).index(&FullRange)); } // Always make a fresh region variable for skolemized regions; diff --git a/src/librustc/middle/infer/error_reporting.rs b/src/librustc/middle/infer/error_reporting.rs index e58ff53b00cb9..64b3c14ff699d 100644 --- a/src/librustc/middle/infer/error_reporting.rs +++ b/src/librustc/middle/infer/error_reporting.rs @@ -200,9 +200,9 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> { ref trace_origins, ref same_regions) => { if 
!same_regions.is_empty() { - self.report_processed_errors(var_origins[], - trace_origins[], - same_regions[]); + self.report_processed_errors(var_origins.index(&FullRange), + trace_origins.index(&FullRange), + same_regions.index(&FullRange)); } } } @@ -268,7 +268,7 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> { } } let pe = ProcessedErrors(var_origins, trace_origins, same_regions); - debug!("errors processed: {}", pe); + debug!("errors processed: {:?}", pe); processed_errors.push(pe); } return processed_errors; @@ -297,7 +297,7 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> { sub: Region, sup: Region) -> Option { - debug!("free_regions_from_same_fn(sub={}, sup={})", sub, sup); + debug!("free_regions_from_same_fn(sub={:?}, sup={:?})", sub, sup); let (scope_id, fr1, fr2) = match (sub, sup) { (ReFree(fr1), ReFree(fr2)) => { if fr1.scope != fr2.scope { @@ -376,7 +376,7 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> { format!("{}: {} ({})", message_root_str, expected_found_str, - ty::type_err_to_str(self.tcx, terr))[]); + ty::type_err_to_str(self.tcx, terr)).index(&FullRange)); match trace.origin { infer::MatchExpressionArm(_, arm_span) => @@ -445,25 +445,25 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> { // Does the required lifetime have a nice name we can print? self.tcx.sess.span_err( origin.span(), - format!("{} may not live long enough", labeled_user_string)[]); + format!("{} may not live long enough", labeled_user_string).index(&FullRange)); self.tcx.sess.span_help( origin.span(), format!( "consider adding an explicit lifetime bound `{}: {}`...", bound_kind.user_string(self.tcx), - sub.user_string(self.tcx))[]); + sub.user_string(self.tcx)).index(&FullRange)); } ty::ReStatic => { // Does the required lifetime have a nice name we can print? 
self.tcx.sess.span_err( origin.span(), - format!("{} may not live long enough", labeled_user_string)[]); + format!("{} may not live long enough", labeled_user_string).index(&FullRange)); self.tcx.sess.span_help( origin.span(), format!( "consider adding an explicit lifetime bound `{}: 'static`...", - bound_kind.user_string(self.tcx))[]); + bound_kind.user_string(self.tcx)).index(&FullRange)); } _ => { @@ -472,15 +472,15 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> { origin.span(), format!( "{} may not live long enough", - labeled_user_string)[]); + labeled_user_string).index(&FullRange)); self.tcx.sess.span_help( origin.span(), format!( "consider adding an explicit lifetime bound for `{}`", - bound_kind.user_string(self.tcx))[]); + bound_kind.user_string(self.tcx)).index(&FullRange)); note_and_explain_region( self.tcx, - format!("{} must be valid for ", labeled_user_string)[], + format!("{} must be valid for ", labeled_user_string).index(&FullRange), sub, "..."); } @@ -522,7 +522,7 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> { ty::local_var_name_str(self.tcx, upvar_id.var_id) .get() - .to_string())[]); + .to_string()).index(&FullRange)); note_and_explain_region( self.tcx, "...the borrowed pointer is valid for ", @@ -534,7 +534,7 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> { ty::local_var_name_str(self.tcx, upvar_id.var_id) .get() - .to_string())[], + .to_string()).index(&FullRange), sup, ""); } @@ -580,7 +580,7 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> { outlive the enclosing closure", ty::local_var_name_str(self.tcx, id).get() - .to_string())[]); + .to_string()).index(&FullRange)); note_and_explain_region( self.tcx, "captured variable is valid for ", @@ -622,7 +622,7 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> { span, format!("the type `{}` does not fulfill the \ required lifetime", - self.ty_to_string(ty))[]); + self.ty_to_string(ty)).index(&FullRange)); note_and_explain_region(self.tcx, "type must outlive ", sub, @@ -648,7 +648,7 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> { span, format!("the type `{}` (provided as the value of \ a type parameter) is not valid at this point", - self.ty_to_string(ty))[]); + self.ty_to_string(ty)).index(&FullRange)); note_and_explain_region(self.tcx, "type must outlive ", sub, @@ -714,7 +714,7 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> { span, format!("type of expression contains references \ that are not valid during the expression: `{}`", - self.ty_to_string(t))[]); + self.ty_to_string(t)).index(&FullRange)); note_and_explain_region( self.tcx, "type is only valid for ", @@ -736,7 +736,7 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> { span, format!("in type `{}`, reference has a longer lifetime \ than the data it references", - self.ty_to_string(ty))[]); + self.ty_to_string(ty)).index(&FullRange)); note_and_explain_region( self.tcx, "the pointer is valid for ", @@ -861,7 +861,7 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> { let (fn_decl, generics, unsafety, ident, expl_self, span) = node_inner.expect("expect item fn"); let taken = lifetimes_in_scope(self.tcx, scope_id); - let life_giver = LifeGiver::with_taken(taken[]); + let life_giver = LifeGiver::with_taken(taken.index(&FullRange)); let rebuilder = Rebuilder::new(self.tcx, fn_decl, expl_self, generics, same_regions, &life_giver); let (fn_decl, expl_self, generics) = rebuilder.rebuild(); @@ -937,7 +937,7 @@ impl<'a, 'tcx> Rebuilder<'a, 
'tcx> { } expl_self_opt = self.rebuild_expl_self(expl_self_opt, lifetime, &anon_nums, ®ion_names); - inputs = self.rebuild_args_ty(inputs[], lifetime, + inputs = self.rebuild_args_ty(inputs.index(&FullRange), lifetime, &anon_nums, ®ion_names); output = self.rebuild_output(&output, lifetime, &anon_nums, ®ion_names); ty_params = self.rebuild_ty_params(ty_params, lifetime, @@ -972,7 +972,7 @@ impl<'a, 'tcx> Rebuilder<'a, 'tcx> { names.push(lt_name); } names.sort(); - let name = token::str_to_ident(names[0][]).name; + let name = token::str_to_ident(names[0].index(&FullRange)).name; return (name_to_dummy_lifetime(name), Kept); } return (self.life_giver.give_lifetime(), Fresh); @@ -1222,7 +1222,7 @@ impl<'a, 'tcx> Rebuilder<'a, 'tcx> { .sess .fatal(format!( "unbound path {}", - pprust::path_to_string(path))[]) + pprust::path_to_string(path)).index(&FullRange)) } Some(&d) => d }; @@ -1420,7 +1420,7 @@ impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> { opt_explicit_self, generics); let msg = format!("consider using an explicit lifetime \ parameter as shown: {}", suggested_fn); - self.tcx.sess.span_help(span, msg[]); + self.tcx.sess.span_help(span, msg.index(&FullRange)); } fn report_inference_failure(&self, @@ -1463,7 +1463,7 @@ impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> { var_origin.span(), format!("cannot infer an appropriate lifetime{} \ due to conflicting requirements", - var_description)[]); + var_description).index(&FullRange)); } fn note_region_origin(&self, origin: &SubregionOrigin<'tcx>) { @@ -1511,7 +1511,7 @@ impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> { self.tcx.sess.span_note( trace.origin.span(), format!("...so that {} ({})", - desc, values_str)[]); + desc, values_str).index(&FullRange)); } None => { // Really should avoid printing this error at @@ -1520,7 +1520,7 @@ impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> { // doing right now. 
- nmatsakis self.tcx.sess.span_note( trace.origin.span(), - format!("...so that {}", desc)[]); + format!("...so that {}", desc).index(&FullRange)); } } } @@ -1537,7 +1537,7 @@ impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> { "...so that closure can access `{}`", ty::local_var_name_str(self.tcx, upvar_id.var_id) .get() - .to_string())[]) + .to_string()).index(&FullRange)) } infer::InfStackClosure(span) => { self.tcx.sess.span_note( @@ -1562,7 +1562,7 @@ impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> { does not outlive the enclosing closure", ty::local_var_name_str( self.tcx, - id).get().to_string())[]); + id).get().to_string()).index(&FullRange)); } infer::IndexSlice(span) => { self.tcx.sess.span_note( @@ -1606,7 +1606,7 @@ impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> { span, format!("...so type `{}` of expression is valid during the \ expression", - self.ty_to_string(t))[]); + self.ty_to_string(t)).index(&FullRange)); } infer::BindingTypeIsNotValidAtDecl(span) => { self.tcx.sess.span_note( @@ -1618,14 +1618,14 @@ impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> { span, format!("...so that the reference type `{}` \ does not outlive the data it points at", - self.ty_to_string(ty))[]); + self.ty_to_string(ty)).index(&FullRange)); } infer::RelateParamBound(span, t) => { self.tcx.sess.span_note( span, format!("...so that the type `{}` \ will meet the declared lifetime bounds", - self.ty_to_string(t))[]); + self.ty_to_string(t)).index(&FullRange)); } infer::RelateDefaultParamBound(span, t) => { self.tcx.sess.span_note( @@ -1633,13 +1633,13 @@ impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> { format!("...so that type parameter \ instantiated with `{}`, \ will meet its declared lifetime bounds", - self.ty_to_string(t))[]); + self.ty_to_string(t)).index(&FullRange)); } infer::RelateRegionParamBound(span) => { self.tcx.sess.span_note( span, format!("...so that the declared lifetime parameter bounds \ - are satisfied")[]); + are satisfied").index(&FullRange)); } } } @@ -1691,7 +1691,7 @@ fn lifetimes_in_scope(tcx: &ty::ctxt, Some(node) => match node { ast_map::NodeItem(item) => match item.node { ast::ItemFn(_, _, _, ref gen, _) => { - taken.push_all(gen.lifetimes[]); + taken.push_all(gen.lifetimes.index(&FullRange)); None }, _ => None @@ -1699,7 +1699,7 @@ fn lifetimes_in_scope(tcx: &ty::ctxt, ast_map::NodeImplItem(ii) => { match *ii { ast::MethodImplItem(ref m) => { - taken.push_all(m.pe_generics().lifetimes[]); + taken.push_all(m.pe_generics().lifetimes.index(&FullRange)); Some(m.id) } ast::TypeImplItem(_) => None, @@ -1758,10 +1758,10 @@ impl LifeGiver { let mut lifetime; loop { let mut s = String::from_str("'"); - s.push_str(num_to_string(self.counter.get())[]); + s.push_str(num_to_string(self.counter.get()).index(&FullRange)); if !self.taken.contains(&s) { lifetime = name_to_dummy_lifetime( - token::str_to_ident(s[]).name); + token::str_to_ident(s.index(&FullRange)).name); self.generated.borrow_mut().push(lifetime); break; } diff --git a/src/librustc/middle/infer/freshen.rs b/src/librustc/middle/infer/freshen.rs index 608ae31475327..02c52f8296761 100644 --- a/src/librustc/middle/infer/freshen.rs +++ b/src/librustc/middle/infer/freshen.rs @@ -66,7 +66,7 @@ impl<'a, 'tcx> TypeFreshener<'a, 'tcx> { None => { } } - match self.freshen_map.entry(&key) { + match self.freshen_map.entry(key) { Entry::Occupied(entry) => *entry.get(), Entry::Vacant(entry) => { let index = self.freshen_count; diff --git 
a/src/librustc/middle/infer/higher_ranked/mod.rs b/src/librustc/middle/infer/higher_ranked/mod.rs index bf0a9cfbea66d..073052dd36870 100644 --- a/src/librustc/middle/infer/higher_ranked/mod.rs +++ b/src/librustc/middle/infer/higher_ranked/mod.rs @@ -154,7 +154,7 @@ impl<'tcx,C> HigherRankedRelations<'tcx> for C // Regions that pre-dated the LUB computation stay as they are. if !is_var_in_set(new_vars, r0) { assert!(!r0.is_bound()); - debug!("generalize_region(r0={}): not new variable", r0); + debug!("generalize_region(r0={:?}): not new variable", r0); return r0; } @@ -164,8 +164,8 @@ impl<'tcx,C> HigherRankedRelations<'tcx> for C // *related* to regions that pre-date the LUB computation // stay as they are. if !tainted.iter().all(|r| is_var_in_set(new_vars, *r)) { - debug!("generalize_region(r0={}): \ - non-new-variables found in {}", + debug!("generalize_region(r0={:?}): \ + non-new-variables found in {:?}", r0, tainted); assert!(!r0.is_bound()); return r0; @@ -178,8 +178,8 @@ impl<'tcx,C> HigherRankedRelations<'tcx> for C // with. for (a_br, a_r) in a_map.iter() { if tainted.iter().any(|x| x == a_r) { - debug!("generalize_region(r0={}): \ - replacing with {}, tainted={}", + debug!("generalize_region(r0={:?}): \ + replacing with {:?}, tainted={:?}", r0, *a_br, tainted); return ty::ReLateBound(debruijn, *a_br); } @@ -187,9 +187,9 @@ impl<'tcx,C> HigherRankedRelations<'tcx> for C infcx.tcx.sess.span_bug( span, - format!("region {} is not associated with \ + format!("region {:?} is not associated with \ any bound region from A!", - r0)[]) + r0).index(&FullRange)) } } @@ -322,7 +322,7 @@ impl<'tcx,C> HigherRankedRelations<'tcx> for C } infcx.tcx.sess.span_bug( span, - format!("could not find original bound region for {}", r)[]); + format!("could not find original bound region for {:?}", r).index(&FullRange)); } fn fresh_bound_variable(infcx: &InferCtxt, debruijn: ty::DebruijnIndex) -> ty::Region { @@ -339,7 +339,7 @@ fn var_ids<'tcx, T: Combine<'tcx>>(combiner: &T, r => { combiner.infcx().tcx.sess.span_bug( combiner.trace().origin.span(), - format!("found non-region-vid: {}", r)[]); + format!("found non-region-vid: {:?}", r).index(&FullRange)); } }).collect() } diff --git a/src/librustc/middle/infer/mod.rs b/src/librustc/middle/infer/mod.rs index c2db81d311483..3f18af3d768e4 100644 --- a/src/librustc/middle/infer/mod.rs +++ b/src/librustc/middle/infer/mod.rs @@ -989,7 +989,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { err: Option<&ty::type_err<'tcx>>) where M: FnOnce(Option, String) -> String, { - debug!("hi! expected_ty = {}, actual_ty = {}", expected_ty, actual_ty); + debug!("hi! 
expected_ty = {:?}, actual_ty = {}", expected_ty, actual_ty); let resolved_expected = expected_ty.map(|e_ty| self.resolve_type_vars_if_possible(&e_ty)); @@ -1002,7 +1002,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { self.tcx.sess.span_err(sp, format!("{}{}", mk_msg(resolved_expected.map(|t| self.ty_to_string(t)), actual_ty), - error_str)[]); + error_str).index(&FullRange)); for err in err.iter() { ty::note_and_explain_type_err(self.tcx, *err) @@ -1219,7 +1219,7 @@ impl<'tcx> Repr<'tcx> for SubregionOrigin<'tcx> { } Reborrow(a) => format!("Reborrow({})", a.repr(tcx)), ReborrowUpvar(a, b) => { - format!("ReborrowUpvar({},{})", a.repr(tcx), b) + format!("ReborrowUpvar({},{:?})", a.repr(tcx), b) } ReferenceOutlivesReferent(_, a) => { format!("ReferenceOutlivesReferent({})", a.repr(tcx)) @@ -1277,7 +1277,7 @@ impl<'tcx> Repr<'tcx> for RegionVariableOrigin<'tcx> { format!("EarlyBoundRegion({},{})", a.repr(tcx), b.repr(tcx)) } LateBoundRegion(a, b, c) => { - format!("LateBoundRegion({},{},{})", a.repr(tcx), b.repr(tcx), c) + format!("LateBoundRegion({},{},{:?})", a.repr(tcx), b.repr(tcx), c) } BoundRegionInCoherence(a) => { format!("bound_regionInCoherence({})", a.repr(tcx)) diff --git a/src/librustc/middle/infer/region_inference/graphviz.rs b/src/librustc/middle/infer/region_inference/graphviz.rs index 98c69962bc288..29feaf358e2e2 100644 --- a/src/librustc/middle/infer/region_inference/graphviz.rs +++ b/src/librustc/middle/infer/region_inference/graphviz.rs @@ -67,7 +67,7 @@ pub fn maybe_print_constraints_for<'a, 'tcx>(region_vars: &RegionVarBindings<'a, } let requested_output = os::getenv("RUST_REGION_GRAPH"); - debug!("requested_output: {} requested_node: {}", + debug!("requested_output: {:?} requested_node: {:?}", requested_output, requested_node); let output_path = { @@ -137,7 +137,7 @@ impl<'a, 'tcx> ConstraintGraph<'a, 'tcx> { let mut node_ids = FnvHashMap::new(); { let mut add_node = |&mut : node| { - if let Vacant(e) = node_ids.entry(&node) { + if let Vacant(e) = node_ids.entry(node) { e.insert(i); i += 1; } @@ -166,7 +166,7 @@ impl<'a, 'tcx> dot::Labeller<'a, Node, Edge> for ConstraintGraph<'a, 'tcx> { fn node_label(&self, n: &Node) -> dot::LabelText { match *n { Node::RegionVid(n_vid) => - dot::LabelText::label(format!("{}", n_vid)), + dot::LabelText::label(format!("{:?}", n_vid)), Node::Region(n_rgn) => dot::LabelText::label(format!("{}", n_rgn.repr(self.tcx))), } @@ -204,12 +204,12 @@ impl<'a, 'tcx> dot::GraphWalk<'a, Node, Edge> for ConstraintGraph<'a, 'tcx> { } fn source(&self, edge: &Edge) -> Node { let (n1, _) = constraint_to_nodes(edge); - debug!("edge {} has source {}", edge, n1); + debug!("edge {:?} has source {:?}", edge, n1); n1 } fn target(&self, edge: &Edge) -> Node { let (_, n2) = constraint_to_nodes(edge); - debug!("edge {} has target {}", edge, n2); + debug!("edge {:?} has target {:?}", edge, n2); n2 } } diff --git a/src/librustc/middle/infer/region_inference/mod.rs b/src/librustc/middle/infer/region_inference/mod.rs index d30a6ff1cd9d5..bdc787e4d589c 100644 --- a/src/librustc/middle/infer/region_inference/mod.rs +++ b/src/librustc/middle/infer/region_inference/mod.rs @@ -273,7 +273,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> { } pub fn rollback_to(&self, snapshot: RegionSnapshot) { - debug!("RegionVarBindings: rollback_to({})", snapshot); + debug!("RegionVarBindings: rollback_to({:?})", snapshot); let mut undo_log = self.undo_log.borrow_mut(); assert!(undo_log.len() > snapshot.length); assert!((*undo_log)[snapshot.length] == OpenSnapshot); @@ -325,7 +325,7 @@ impl<'a, 
'tcx> RegionVarBindings<'a, 'tcx> { if self.in_snapshot() { self.undo_log.borrow_mut().push(AddVar(vid)); } - debug!("created new region variable {} with origin {}", + debug!("created new region variable {:?} with origin {}", vid, origin.repr(self.tcx)); return vid; } @@ -427,7 +427,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> { let mut givens = self.givens.borrow_mut(); if givens.insert((sub, sup)) { - debug!("add_given({} <= {})", + debug!("add_given({} <= {:?})", sub.repr(self.tcx), sup); @@ -475,7 +475,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> { origin.span(), format!("cannot relate bound region: {} <= {}", sub.repr(self.tcx), - sup.repr(self.tcx))[]); + sup.repr(self.tcx)).index(&FullRange)); } (_, ReStatic) => { // all regions are subregions of static, so we can ignore this @@ -565,7 +565,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> { } Some(ref values) => { let r = lookup(values, rid); - debug!("resolve_var({}) = {}", rid, r.repr(self.tcx)); + debug!("resolve_var({:?}) = {}", rid, r.repr(self.tcx)); r } } @@ -602,7 +602,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> { } relate(self, a, ReInfer(ReVar(c))); relate(self, b, ReInfer(ReVar(c))); - debug!("combine_vars() c={}", c); + debug!("combine_vars() c={:?}", c); ReInfer(ReVar(c)) } @@ -623,7 +623,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> { /// made---`r0` itself will be the first entry. This is used when checking whether skolemized /// regions are being improperly related to other regions. pub fn tainted(&self, mark: &RegionSnapshot, r0: Region) -> Vec { - debug!("tainted(mark={}, r0={})", mark, r0.repr(self.tcx)); + debug!("tainted(mark={:?}, r0={})", mark, r0.repr(self.tcx)); let _indenter = indenter(); // `result_set` acts as a worklist: we explore all outgoing @@ -634,7 +634,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> { while result_index < result_set.len() { // nb: can't use uint::range() here because result_set grows let r = result_set[result_index]; - debug!("result_index={}, r={}", result_index, r); + debug!("result_index={}, r={:?}", result_index, r); for undo_entry in self.undo_log.borrow().slice_from(mark.length).iter() @@ -736,7 +736,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> { self.tcx.sess.bug( format!("cannot relate bound region: LUB({}, {})", a.repr(self.tcx), - b.repr(self.tcx))[]); + b.repr(self.tcx)).index(&FullRange)); } (ReStatic, _) | (_, ReStatic) => { @@ -751,9 +751,9 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> { self.tcx.sess.span_bug( (*self.var_origins.borrow())[v_id.index as uint].span(), format!("lub_concrete_regions invoked with \ - non-concrete regions: {}, {}", + non-concrete regions: {:?}, {:?}", a, - b)[]); + b).index(&FullRange)); } (ReFree(ref fr), ReScope(s_id)) | @@ -827,7 +827,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> { a: Region, b: Region) -> cres<'tcx, Region> { - debug!("glb_concrete_regions({}, {})", a, b); + debug!("glb_concrete_regions({:?}, {:?})", a, b); match (a, b) { (ReLateBound(..), _) | (_, ReLateBound(..)) | @@ -836,7 +836,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> { self.tcx.sess.bug( format!("cannot relate bound region: GLB({}, {})", a.repr(self.tcx), - b.repr(self.tcx))[]); + b.repr(self.tcx)).index(&FullRange)); } (ReStatic, r) | (r, ReStatic) => { @@ -854,9 +854,9 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> { self.tcx.sess.span_bug( (*self.var_origins.borrow())[v_id.index as uint].span(), format!("glb_concrete_regions invoked with \ - non-concrete regions: {}, {}", + non-concrete regions: {:?}, {:?}", a, - b)[]); + 
b).index(&FullRange)); } (ReFree(ref fr), ReScope(s_id)) | @@ -932,7 +932,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> { // scopes or two free regions. So, if one of // these scopes is a subscope of the other, return // it. Otherwise fail. - debug!("intersect_scopes(scope_a={}, scope_b={}, region_a={}, region_b={})", + debug!("intersect_scopes(scope_a={:?}, scope_b={:?}, region_a={:?}, region_b={:?})", scope_a, scope_b, region_a, region_b); match self.tcx.region_maps.nearest_common_ancestor(scope_a, scope_b) { Some(r_id) if scope_a == r_id => Ok(ReScope(scope_b)), @@ -971,13 +971,13 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> { // Dorky hack to cause `dump_constraints` to only get called // if debug mode is enabled: - debug!("----() End constraint listing {}---", self.dump_constraints()); + debug!("----() End constraint listing {:?}---", self.dump_constraints()); graphviz::maybe_print_constraints_for(self, subject); self.expansion(var_data.as_mut_slice()); self.contraction(var_data.as_mut_slice()); let values = - self.extract_values_and_collect_conflicts(var_data[], + self.extract_values_and_collect_conflicts(var_data.index(&FullRange), errors); self.collect_concrete_region_errors(&values, errors); values @@ -1039,7 +1039,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> { b_data: &mut VarData) -> bool { - debug!("expand_node({}, {} == {})", + debug!("expand_node({}, {:?} == {})", a_region.repr(self.tcx), b_vid, b_data.value.repr(self.tcx)); @@ -1058,7 +1058,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> { b_data.classification = Expanding; match b_data.value { NoValue => { - debug!("Setting initial value of {} to {}", + debug!("Setting initial value of {:?} to {}", b_vid, a_region.repr(self.tcx)); b_data.value = Value(a_region); @@ -1071,7 +1071,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> { return false; } - debug!("Expanding value of {} from {} to {}", + debug!("Expanding value of {:?} from {} to {}", b_vid, cur_region.repr(self.tcx), lub.repr(self.tcx)); @@ -1122,7 +1122,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> { a_data: &mut VarData, b_region: Region) -> bool { - debug!("contract_node({} == {}/{}, {})", + debug!("contract_node({:?} == {}/{:?}, {})", a_vid, a_data.value.repr(self.tcx), a_data.classification, b_region.repr(self.tcx)); @@ -1156,7 +1156,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> { b_region: Region) -> bool { if !this.is_subregion_of(a_region, b_region) { - debug!("Setting {} to ErrorValue: {} not subregion of {}", + debug!("Setting {:?} to ErrorValue: {} not subregion of {}", a_vid, a_region.repr(this.tcx), b_region.repr(this.tcx)); @@ -1176,7 +1176,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> { if glb == a_region { false } else { - debug!("Contracting value of {} from {} to {}", + debug!("Contracting value of {:?} from {} to {}", a_vid, a_region.repr(this.tcx), glb.repr(this.tcx)); @@ -1185,7 +1185,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> { } } Err(_) => { - debug!("Setting {} to ErrorValue: no glb of {}, {}", + debug!("Setting {:?} to ErrorValue: no glb of {}, {}", a_vid, a_region.repr(this.tcx), b_region.repr(this.tcx)); @@ -1412,10 +1412,10 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> { self.tcx.sess.span_bug( (*self.var_origins.borrow())[node_idx.index as uint].span(), format!("collect_error_for_expanding_node() could not find error \ - for var {}, lower_bounds={}, upper_bounds={}", + for var {:?}, lower_bounds={}, upper_bounds={}", node_idx, lower_bounds.repr(self.tcx), - upper_bounds.repr(self.tcx))[]); + 
upper_bounds.repr(self.tcx)).index(&FullRange)); } fn collect_error_for_contracting_node( @@ -1457,9 +1457,9 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> { self.tcx.sess.span_bug( (*self.var_origins.borrow())[node_idx.index as uint].span(), format!("collect_error_for_contracting_node() could not find error \ - for var {}, upper_bounds={}", + for var {:?}, upper_bounds={}", node_idx, - upper_bounds.repr(self.tcx))[]); + upper_bounds.repr(self.tcx)).index(&FullRange)); } fn collect_concrete_regions(&self, @@ -1498,8 +1498,8 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> { state.dup_found = true; } - debug!("collect_concrete_regions(orig_node_idx={}, node_idx={}, \ - classification={})", + debug!("collect_concrete_regions(orig_node_idx={:?}, node_idx={:?}, \ + classification={:?})", orig_node_idx, node_idx, classification); // figure out the direction from which this node takes its @@ -1520,7 +1520,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> { graph: &RegionGraph, source_vid: RegionVid, dir: Direction) { - debug!("process_edges(source_vid={}, dir={})", source_vid, dir); + debug!("process_edges(source_vid={:?}, dir={:?})", source_vid, dir); let source_node_index = NodeIndex(source_vid.index as uint); graph.each_adjacent_edge(source_node_index, dir, |_, edge| { diff --git a/src/librustc/middle/infer/resolve.rs b/src/librustc/middle/infer/resolve.rs index 3ed866d4aba8f..9035d72e9a2fb 100644 --- a/src/librustc/middle/infer/resolve.rs +++ b/src/librustc/middle/infer/resolve.rs @@ -96,7 +96,7 @@ impl<'a, 'tcx> ty_fold::TypeFolder<'tcx> for FullTypeResolver<'a, 'tcx> { ty::ty_infer(_) => { self.infcx.tcx.sess.bug( format!("Unexpected type in full type resolver: {}", - t.repr(self.infcx.tcx))[]); + t.repr(self.infcx.tcx)).index(&FullRange)); } _ => { ty_fold::super_fold_ty(self, t) diff --git a/src/librustc/middle/infer/type_variable.rs b/src/librustc/middle/infer/type_variable.rs index 2aacc863f54c9..3f3e4c50e7047 100644 --- a/src/librustc/middle/infer/type_variable.rs +++ b/src/librustc/middle/infer/type_variable.rs @@ -184,7 +184,7 @@ impl<'tcx> TypeVariableTable<'tcx> { let escaping_type = self.probe(vid).unwrap(); escaping_types.push(escaping_type); } - debug!("SpecifyVar({}) new_elem_threshold={}", vid, new_elem_threshold); + debug!("SpecifyVar({:?}) new_elem_threshold={}", vid, new_elem_threshold); } _ => { } diff --git a/src/librustc/middle/infer/unify.rs b/src/librustc/middle/infer/unify.rs index 73da96445934c..4fa8e07ddd4f1 100644 --- a/src/librustc/middle/infer/unify.rs +++ b/src/librustc/middle/infer/unify.rs @@ -10,7 +10,7 @@ pub use self::VarValue::*; -use std::kinds::marker; +use std::marker; use middle::ty::{expected_found, IntVarValue}; use middle::ty::{self, Ty}; @@ -129,7 +129,7 @@ impl<'tcx, V:PartialEq+Clone+Repr<'tcx>, K:UnifyKey<'tcx, V>> UnificationTable K { let index = self.values.push(Root(value, 0)); let k = UnifyKey::from_index(index); - debug!("{}: created new key: {}", + debug!("{}: created new key: {:?}", UnifyKey::tag(None::), k); k diff --git a/src/librustc/middle/intrinsicck.rs b/src/librustc/middle/intrinsicck.rs index a83416667abdc..bd96a8a0f2cd7 100644 --- a/src/librustc/middle/intrinsicck.rs +++ b/src/librustc/middle/intrinsicck.rs @@ -205,11 +205,11 @@ impl<'a, 'tcx> IntrinsicCheckingVisitor<'a, 'tcx> { debug!("with_each_combination(substs={})", substs.repr(self.tcx)); - callback.call_mut((substs,)); + callback(substs); } Some((space, index, ¶m_ty)) => { - debug!("with_each_combination: space={}, index={}, param_ty={}", + debug!("with_each_combination: 
space={:?}, index={}, param_ty={}", space, index, param_ty.repr(self.tcx)); if !ty::type_is_sized(param_env, span, param_ty) { diff --git a/src/librustc/middle/lang_items.rs b/src/librustc/middle/lang_items.rs index bbb11b9f93bbd..2f81d8c0f701e 100644 --- a/src/librustc/middle/lang_items.rs +++ b/src/librustc/middle/lang_items.rs @@ -266,8 +266,6 @@ lets_do_this! { ShrTraitLangItem, "shr", shr_trait; IndexTraitLangItem, "index", index_trait; IndexMutTraitLangItem, "index_mut", index_mut_trait; - SliceTraitLangItem, "slice", slice_trait; - SliceMutTraitLangItem, "slice_mut", slice_mut_trait; RangeStructLangItem, "range", range_struct; RangeFromStructLangItem, "range_from", range_from_struct; RangeToStructLangItem, "range_to", range_to_struct; diff --git a/src/librustc/middle/liveness.rs b/src/librustc/middle/liveness.rs index 77875139be3a3..850033b3ed126 100644 --- a/src/librustc/middle/liveness.rs +++ b/src/librustc/middle/liveness.rs @@ -112,6 +112,7 @@ use self::VarKind::*; use middle::def::*; use middle::mem_categorization::Typer; use middle::pat_util; +use middle::region::CodeExtent; use middle::ty; use middle::ty::UnboxedClosureTyper; use lint; @@ -289,7 +290,7 @@ impl<'a, 'tcx> IrMaps<'a, 'tcx> { self.lnks.push(lnk); self.num_live_nodes += 1; - debug!("{} is of kind {}", ln.to_string(), + debug!("{:?} is of kind {}", ln, live_node_kind_to_string(lnk, self.tcx)); ln @@ -299,7 +300,7 @@ impl<'a, 'tcx> IrMaps<'a, 'tcx> { let ln = self.add_live_node(lnk); self.live_node_map.insert(node_id, ln); - debug!("{} is node {}", ln.to_string(), node_id); + debug!("{:?} is node {}", ln, node_id); } fn add_variable(&mut self, vk: VarKind) -> Variable { @@ -314,7 +315,7 @@ impl<'a, 'tcx> IrMaps<'a, 'tcx> { ImplicitRet | CleanExit => {} } - debug!("{} is {}", v.to_string(), vk); + debug!("{:?} is {:?}", v, vk); v } @@ -326,7 +327,7 @@ impl<'a, 'tcx> IrMaps<'a, 'tcx> { self.tcx .sess .span_bug(span, format!("no variable registered for id {}", - node_id)[]); + node_id).index(&FullRange)); } } } @@ -377,7 +378,7 @@ fn visit_fn(ir: &mut IrMaps, // swap in a new set of IR maps for this function body: let mut fn_maps = IrMaps::new(ir.tcx); - debug!("creating fn_maps: {}", &fn_maps as *const IrMaps); + debug!("creating fn_maps: {:?}", &fn_maps as *const IrMaps); for arg in decl.inputs.iter() { pat_util::pat_bindings(&ir.tcx.def_map, @@ -430,7 +431,7 @@ fn visit_local(ir: &mut IrMaps, local: &ast::Local) { fn visit_arm(ir: &mut IrMaps, arm: &ast::Arm) { for pat in arm.pats.iter() { pat_util::pat_bindings(&ir.tcx.def_map, &**pat, |bm, p_id, sp, path1| { - debug!("adding local variable {} from match with bm {}", + debug!("adding local variable {} from match with bm {:?}", p_id, bm); let name = path1.node; ir.add_live_node_for_node(p_id, VarDefNode(sp)); @@ -448,7 +449,7 @@ fn visit_expr(ir: &mut IrMaps, expr: &Expr) { // live nodes required for uses or definitions of variables: ast::ExprPath(_) => { let def = ir.tcx.def_map.borrow()[expr.id].clone(); - debug!("expr {}: path that leads to {}", expr.id, def); + debug!("expr {}: path that leads to {:?}", expr.id, def); if let DefLocal(..) 
= def { ir.add_live_node_for_node(expr.id, ExprNode(expr.span)); } @@ -491,7 +492,7 @@ fn visit_expr(ir: &mut IrMaps, expr: &Expr) { } ast::ExprForLoop(ref pat, _, _, _) => { pat_util::pat_bindings(&ir.tcx.def_map, &**pat, |bm, p_id, sp, path1| { - debug!("adding local variable {} from for loop with bm {}", + debug!("adding local variable {} from for loop with bm {:?}", p_id, bm); let name = path1.node; ir.add_live_node_for_node(p_id, VarDefNode(sp)); @@ -597,7 +598,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { self.ir.tcx.sess.span_bug( span, format!("no live node registered for node {}", - node_id)[]); + node_id).index(&FullRange)); } } } @@ -702,7 +703,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { for var_idx in range(0u, self.ir.num_vars) { let idx = node_base_idx + var_idx; if test(idx).is_valid() { - try!(write!(wr, " {}", Variable(var_idx).to_string())); + try!(write!(wr, " {:?}", Variable(var_idx))); } } Ok(()) @@ -740,11 +741,11 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { let mut wr = Vec::new(); { let wr = &mut wr as &mut io::Writer; - write!(wr, "[ln({}) of kind {} reads", ln.get(), self.ir.lnk(ln)); + write!(wr, "[ln({:?}) of kind {:?} reads", ln.get(), self.ir.lnk(ln)); self.write_vars(wr, ln, |idx| self.users[idx].reader); write!(wr, " writes"); self.write_vars(wr, ln, |idx| self.users[idx].writer); - write!(wr, " precedes {}]", self.successors[ln.get()].to_string()); + write!(wr, " precedes {:?}]", self.successors[ln.get()]); } String::from_utf8(wr).unwrap() } @@ -792,8 +793,8 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { } }); - debug!("merge_from_succ(ln={}, succ={}, first_merge={}, changed={})", - ln.to_string(), self.ln_str(succ_ln), first_merge, changed); + debug!("merge_from_succ(ln={:?}, succ={}, first_merge={}, changed={})", + ln, self.ln_str(succ_ln), first_merge, changed); return changed; fn copy_if_invalid(src: LiveNode, dst: &mut LiveNode) -> bool { @@ -814,14 +815,14 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { self.users[idx].reader = invalid_node(); self.users[idx].writer = invalid_node(); - debug!("{} defines {} (idx={}): {}", writer.to_string(), var.to_string(), + debug!("{:?} defines {:?} (idx={}): {}", writer, var, idx, self.ln_str(writer)); } // Either read, write, or both depending on the acc bitset fn acc(&mut self, ln: LiveNode, var: Variable, acc: uint) { - debug!("{} accesses[{:x}] {}: {}", - ln.to_string(), acc, var.to_string(), self.ln_str(ln)); + debug!("{:?} accesses[{:x}] {:?}: {}", + ln, acc, var, self.ln_str(ln)); let idx = self.idx(ln, var); let user = &mut self.users[idx]; @@ -857,14 +858,14 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { |this| this.propagate_through_fn_block(decl, body)); // hack to skip the loop unless debug! 
is enabled: - debug!("^^ liveness computation results for body {} (entry={})", + debug!("^^ liveness computation results for body {} (entry={:?})", { for ln_idx in range(0u, self.ir.num_live_nodes) { - debug!("{}", self.ln_str(LiveNode(ln_idx))); + debug!("{:?}", self.ln_str(LiveNode(ln_idx))); } body.id }, - entry_ln.to_string()); + entry_ln); entry_ln } @@ -1132,7 +1133,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { // Uninteresting cases: just propagate in rev exec order ast::ExprVec(ref exprs) => { - self.propagate_through_exprs(exprs[], succ) + self.propagate_through_exprs(exprs.index(&FullRange), succ) } ast::ExprRepeat(ref element, ref count) => { @@ -1149,32 +1150,31 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { ast::ExprCall(ref f, ref args) => { let diverges = !self.ir.tcx.is_method_call(expr.id) && { - let t_ret = ty::ty_fn_ret(ty::expr_ty_adjusted(self.ir.tcx, &**f)); - t_ret == ty::FnDiverging + ty::ty_fn_ret(ty::expr_ty_adjusted(self.ir.tcx, &**f)).diverges() }; let succ = if diverges { self.s.exit_ln } else { succ }; - let succ = self.propagate_through_exprs(args[], succ); + let succ = self.propagate_through_exprs(args.index(&FullRange), succ); self.propagate_through_expr(&**f, succ) } ast::ExprMethodCall(_, _, ref args) => { let method_call = ty::MethodCall::expr(expr.id); let method_ty = self.ir.tcx.method_map.borrow().get(&method_call).unwrap().ty; - let diverges = ty::ty_fn_ret(method_ty) == ty::FnDiverging; + let diverges = ty::ty_fn_ret(method_ty).diverges(); let succ = if diverges { self.s.exit_ln } else { succ }; - self.propagate_through_exprs(args[], succ) + self.propagate_through_exprs(args.index(&FullRange), succ) } ast::ExprTup(ref exprs) => { - self.propagate_through_exprs(exprs[], succ) + self.propagate_through_exprs(exprs.index(&FullRange), succ) } ast::ExprBinary(op, ref l, ref r) if ast_util::lazy_binop(op) => { @@ -1514,11 +1514,11 @@ fn check_fn(_v: &Liveness, } impl<'a, 'tcx> Liveness<'a, 'tcx> { - fn fn_ret(&self, id: NodeId) -> ty::FnOutput<'tcx> { + fn fn_ret(&self, id: NodeId) -> ty::PolyFnOutput<'tcx> { let fn_ty = ty::node_id_to_type(self.ir.tcx, id); match fn_ty.sty { ty::ty_unboxed_closure(closure_def_id, _, substs) => - self.ir.tcx.unboxed_closure_type(closure_def_id, substs).sig.0.output, + self.ir.tcx.unboxed_closure_type(closure_def_id, substs).sig.output(), _ => ty::ty_fn_ret(fn_ty), } @@ -1529,8 +1529,16 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { sp: Span, _fk: FnKind, entry_ln: LiveNode, - body: &ast::Block) { - match self.fn_ret(id) { + body: &ast::Block) + { + // within the fn body, late-bound regions are liberated: + let fn_ret = + ty::liberate_late_bound_regions( + self.ir.tcx, + CodeExtent::from_node_id(body.id), + &self.fn_ret(id)); + + match fn_ret { ty::FnConverging(t_ret) if self.live_on_entry(entry_ln, self.s.no_ret_var).is_some() => { diff --git a/src/librustc/middle/mem_categorization.rs b/src/librustc/middle/mem_categorization.rs index 2b8c9b532e593..b29c24c586114 100644 --- a/src/librustc/middle/mem_categorization.rs +++ b/src/librustc/middle/mem_categorization.rs @@ -482,28 +482,20 @@ impl<'t,'tcx,TYPER:Typer<'tcx>> MemCategorizationContext<'t,TYPER> { Ok(self.cat_tup_field(expr, base_cmt, idx.node, expr_ty)) } - ast::ExprIndex(ref base, ref idx) => { - match idx.node { - ast::ExprRange(..) => { - // Slicing syntax special case (KILLME). 
- Ok(self.cat_rvalue_node(expr.id(), expr.span(), expr_ty)) + ast::ExprIndex(ref base, _) => { + let method_call = ty::MethodCall::expr(expr.id()); + match self.typer.node_method_ty(method_call) { + Some(method_ty) => { + // If this is an index implemented by a method call, then it + // will include an implicit deref of the result. + let ret_ty = self.overloaded_method_return_ty(method_ty); + self.cat_deref(expr, + self.cat_rvalue_node(expr.id(), + expr.span(), + ret_ty), 1, true) } - _ => { - let method_call = ty::MethodCall::expr(expr.id()); - match self.typer.node_method_ty(method_call) { - Some(method_ty) => { - // If this is an index implemented by a method call, then it will - // include an implicit deref of the result. - let ret_ty = ty::ty_fn_ret(method_ty).unwrap(); - self.cat_deref(expr, - self.cat_rvalue_node(expr.id(), - expr.span(), - ret_ty), 1, true) - } - None => { - self.cat_index(expr, try!(self.cat_expr(&**base))) - } - } + None => { + self.cat_index(expr, try!(self.cat_expr(&**base))) } } } @@ -547,7 +539,7 @@ impl<'t,'tcx,TYPER:Typer<'tcx>> MemCategorizationContext<'t,TYPER> { expr_ty: Ty<'tcx>, def: def::Def) -> McResult> { - debug!("cat_def: id={} expr={} def={}", + debug!("cat_def: id={} expr={} def={:?}", id, expr_ty.repr(self.tcx()), def); match def { @@ -594,7 +586,7 @@ impl<'t,'tcx,TYPER:Typer<'tcx>> MemCategorizationContext<'t,TYPER> { span, format!("Upvar of non-closure {} - {}", fn_node_id, - ty.repr(self.tcx()))[]); + ty.repr(self.tcx())).index(&FullRange)); } } } @@ -860,12 +852,14 @@ impl<'t,'tcx,TYPER:Typer<'tcx>> MemCategorizationContext<'t,TYPER> { }; let method_ty = self.typer.node_method_ty(method_call); - debug!("cat_deref: method_call={} method_ty={}", + debug!("cat_deref: method_call={:?} method_ty={:?}", method_call, method_ty.map(|ty| ty.repr(self.tcx()))); let base_cmt = match method_ty { Some(method_ty) => { - let ref_ty = ty::ty_fn_ret(method_ty).unwrap(); + let ref_ty = + ty::assert_no_late_bound_regions( + self.tcx(), &ty::ty_fn_ret(method_ty)).unwrap(); self.cat_rvalue_node(node.id(), node.span(), ref_ty) } None => base_cmt @@ -945,9 +939,12 @@ impl<'t,'tcx,TYPER:Typer<'tcx>> MemCategorizationContext<'t,TYPER> { let element_ty = match method_ty { Some(method_ty) => { - let ref_ty = ty::ty_fn_ret(method_ty).unwrap(); + let ref_ty = self.overloaded_method_return_ty(method_ty); base_cmt = self.cat_rvalue_node(elt.id(), elt.span(), ref_ty); - ty::ty_fn_args(method_ty)[0] + + // FIXME(#20649) -- why are we using the `self_ty` as the element type...? + let self_ty = ty::ty_fn_sig(method_ty).input(0); + ty::assert_no_late_bound_regions(self.tcx(), &self_ty) } None => { match ty::array_element_ty(self.tcx(), base_cmt.ty) { @@ -1269,6 +1266,19 @@ impl<'t,'tcx,TYPER:Typer<'tcx>> MemCategorizationContext<'t,TYPER> { Ok(()) } + + fn overloaded_method_return_ty(&self, + method_ty: Ty<'tcx>) + -> Ty<'tcx> + { + // When we process an overloaded `*` or `[]` etc, we often + // need to extract the return type of the method. These method + // types are generated by method resolution and always have + // all late-bound regions fully instantiated, so we just want + // to skip past the binder. 
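// [Editor's note: illustrative aside, not part of the patch.]
// The helper added in this hunk reads an overloaded operator's return type
// through the Binder that `ty::ty_fn_ret` now returns, relying on the fact
// (stated in the comment above) that method types produced by method
// resolution have all late-bound regions fully instantiated. A minimal
// stand-alone model of that "skip past the binder" step; `Binder`, `FnOutput`
// and `assert_no_late_bound_regions` below are simplified stand-ins, not the
// real rustc items.

enum FnOutput {
    Converging(&'static str), // carries the name of the return type
    Diverging,                // the `!` return type
}

// Models ty::Binder<T>: a value that may capture late-bound regions.
struct Binder<T>(T);

// Models ty::assert_no_late_bound_regions: here it simply peels the binder;
// the real function also checks that nothing bound actually remains.
fn assert_no_late_bound_regions<T>(b: Binder<T>) -> T {
    b.0
}

fn main() {
    let ret = Binder(FnOutput::Converging("&str"));
    // Overloaded operators never diverge, which is why the new helper can
    // unwrap the converging type unconditionally.
    match assert_no_late_bound_regions(ret) {
        FnOutput::Converging(ty) => println!("return type: {}", ty),
        FnOutput::Diverging => unreachable!("overloaded ops do not diverge"),
    }
}
// [End of editor's note.]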
+ ty::assert_no_late_bound_regions(self.tcx(), &ty::ty_fn_ret(method_ty)) + .unwrap() // overloaded ops do not diverge, either + } } #[derive(Copy)] @@ -1455,7 +1465,7 @@ impl<'tcx> cmt_<'tcx> { impl<'tcx> Repr<'tcx> for cmt_<'tcx> { fn repr(&self, tcx: &ty::ctxt<'tcx>) -> String { - format!("{{{} id:{} m:{} ty:{}}}", + format!("{{{} id:{} m:{:?} ty:{}}}", self.cat.repr(tcx), self.id, self.mutbl, @@ -1470,7 +1480,7 @@ impl<'tcx> Repr<'tcx> for categorization<'tcx> { cat_rvalue(..) | cat_local(..) | cat_upvar(..) => { - format!("{}", *self) + format!("{:?}", *self) } cat_deref(ref cmt, derefs, ptr) => { format!("{}-{}{}->", cmt.cat.repr(tcx), ptr_sigil(ptr), derefs) diff --git a/src/librustc/middle/privacy.rs b/src/librustc/middle/privacy.rs index 50e328ef0e3c3..861c4a2c85e35 100644 --- a/src/librustc/middle/privacy.rs +++ b/src/librustc/middle/privacy.rs @@ -435,28 +435,28 @@ impl<'a, 'tcx> PrivacyVisitor<'a, 'tcx> { fn def_privacy(&self, did: ast::DefId) -> PrivacyResult { if !is_local(did) { if self.external_exports.contains(&did) { - debug!("privacy - {} was externally exported", did); + debug!("privacy - {:?} was externally exported", did); return Allowable; } - debug!("privacy - is {} a public method", did); + debug!("privacy - is {:?} a public method", did); return match self.tcx.impl_or_trait_items.borrow().get(&did) { Some(&ty::MethodTraitItem(ref meth)) => { - debug!("privacy - well at least it's a method: {}", + debug!("privacy - well at least it's a method: {:?}", *meth); match meth.container { ty::TraitContainer(id) => { - debug!("privacy - recursing on trait {}", id); + debug!("privacy - recursing on trait {:?}", id); self.def_privacy(id) } ty::ImplContainer(id) => { match ty::impl_trait_ref(self.tcx, id) { Some(t) => { - debug!("privacy - impl of trait {}", id); + debug!("privacy - impl of trait {:?}", id); self.def_privacy(t.def_id) } None => { - debug!("privacy - found a method {}", + debug!("privacy - found a method {:?}", meth.vis); if meth.vis == ast::Public { Allowable @@ -471,17 +471,17 @@ impl<'a, 'tcx> PrivacyVisitor<'a, 'tcx> { Some(&ty::TypeTraitItem(ref typedef)) => { match typedef.container { ty::TraitContainer(id) => { - debug!("privacy - recursing on trait {}", id); + debug!("privacy - recursing on trait {:?}", id); self.def_privacy(id) } ty::ImplContainer(id) => { match ty::impl_trait_ref(self.tcx, id) { Some(t) => { - debug!("privacy - impl of trait {}", id); + debug!("privacy - impl of trait {:?}", id); self.def_privacy(t.def_id) } None => { - debug!("privacy - found a typedef {}", + debug!("privacy - found a typedef {:?}", typedef.vis); if typedef.vis == ast::Public { Allowable @@ -615,10 +615,10 @@ impl<'a, 'tcx> PrivacyVisitor<'a, 'tcx> { match result { None => true, Some((span, msg, note)) => { - self.tcx.sess.span_err(span, msg[]); + self.tcx.sess.span_err(span, msg.index(&FullRange)); match note { Some((span, msg)) => { - self.tcx.sess.span_note(span, msg[]) + self.tcx.sess.span_note(span, msg.index(&FullRange)) } None => {}, } @@ -696,7 +696,7 @@ impl<'a, 'tcx> PrivacyVisitor<'a, 'tcx> { let fields = ty::lookup_struct_fields(self.tcx, id); let field = match name { NamedField(ident) => { - debug!("privacy - check named field {} in struct {}", ident.name, id); + debug!("privacy - check named field {} in struct {:?}", ident.name, id); fields.iter().find(|f| f.name == ident.name).unwrap() } UnnamedField(idx) => &fields[idx] @@ -720,7 +720,7 @@ impl<'a, 'tcx> PrivacyVisitor<'a, 'tcx> { UnnamedField(idx) => format!("field #{} of {} is private", idx + 1, 
struct_desc), }; - self.tcx.sess.span_err(span, msg[]); + self.tcx.sess.span_err(span, msg.index(&FullRange)); } // Given the ID of a method, checks to ensure it's in scope. @@ -742,7 +742,7 @@ impl<'a, 'tcx> PrivacyVisitor<'a, 'tcx> { method_id, None, format!("method `{}`", - string)[])); + string).index(&FullRange))); } // Checks that a path is in scope. @@ -756,7 +756,7 @@ impl<'a, 'tcx> PrivacyVisitor<'a, 'tcx> { self.ensure_public(span, def, Some(origdid), - format!("{} `{}`", tyname, name)[]) + format!("{} `{}`", tyname, name).index(&FullRange)) }; match self.last_private_map[path_id] { diff --git a/src/librustc/middle/reachable.rs b/src/librustc/middle/reachable.rs index 5736e3072862a..51602e88f9342 100644 --- a/src/librustc/middle/reachable.rs +++ b/src/librustc/middle/reachable.rs @@ -50,7 +50,7 @@ fn generics_require_inlining(generics: &ast::Generics) -> bool { // monomorphized or it was marked with `#[inline]`. This will only return // true for functions. fn item_might_be_inlined(item: &ast::Item) -> bool { - if attributes_specify_inlining(item.attrs[]) { + if attributes_specify_inlining(item.attrs.index(&FullRange)) { return true } @@ -65,7 +65,7 @@ fn item_might_be_inlined(item: &ast::Item) -> bool { fn method_might_be_inlined(tcx: &ty::ctxt, method: &ast::Method, impl_src: ast::DefId) -> bool { - if attributes_specify_inlining(method.attrs[]) || + if attributes_specify_inlining(method.attrs.index(&FullRange)) || generics_require_inlining(method.pe_generics()) { return true } @@ -202,7 +202,7 @@ impl<'a, 'tcx> ReachableContext<'a, 'tcx> { ast::MethodImplItem(ref method) => { if generics_require_inlining(method.pe_generics()) || attributes_specify_inlining( - method.attrs[]) { + method.attrs.index(&FullRange)) { true } else { let impl_did = self.tcx @@ -249,7 +249,7 @@ impl<'a, 'tcx> ReachableContext<'a, 'tcx> { None => { self.tcx.sess.bug(format!("found unmapped ID in worklist: \ {}", - search_item)[]) + search_item).index(&FullRange)) } } } @@ -341,7 +341,7 @@ impl<'a, 'tcx> ReachableContext<'a, 'tcx> { .bug(format!("found unexpected thingy in worklist: {}", self.tcx .map - .node_to_string(search_item))[]) + .node_to_string(search_item)).index(&FullRange)) } } } diff --git a/src/librustc/middle/region.rs b/src/librustc/middle/region.rs index 68e257bc0c5ec..5d18843097f37 100644 --- a/src/librustc/middle/region.rs +++ b/src/librustc/middle/region.rs @@ -146,24 +146,24 @@ impl RegionMaps { None => {} } - debug!("relate_free_regions(sub={}, sup={})", sub, sup); + debug!("relate_free_regions(sub={:?}, sup={:?})", sub, sup); self.free_region_map.borrow_mut().insert(sub, vec!(sup)); } pub fn record_encl_scope(&self, sub: CodeExtent, sup: CodeExtent) { - debug!("record_encl_scope(sub={}, sup={})", sub, sup); + debug!("record_encl_scope(sub={:?}, sup={:?})", sub, sup); assert!(sub != sup); self.scope_map.borrow_mut().insert(sub, sup); } pub fn record_var_scope(&self, var: ast::NodeId, lifetime: CodeExtent) { - debug!("record_var_scope(sub={}, sup={})", var, lifetime); + debug!("record_var_scope(sub={:?}, sup={:?})", var, lifetime); assert!(var != lifetime.node_id()); self.var_map.borrow_mut().insert(var, lifetime); } pub fn record_rvalue_scope(&self, var: ast::NodeId, lifetime: CodeExtent) { - debug!("record_rvalue_scope(sub={}, sup={})", var, lifetime); + debug!("record_rvalue_scope(sub={:?}, sup={:?})", var, lifetime); assert!(var != lifetime.node_id()); self.rvalue_scopes.borrow_mut().insert(var, lifetime); } @@ -172,7 +172,7 @@ impl RegionMaps { /// e.g. 
by an expression like `a().f` -- they will be freed within the innermost terminating /// scope. pub fn mark_as_terminating_scope(&self, scope_id: CodeExtent) { - debug!("record_terminating_scope(scope_id={})", scope_id); + debug!("record_terminating_scope(scope_id={:?})", scope_id); self.terminating_scopes.borrow_mut().insert(scope_id); } @@ -186,7 +186,7 @@ impl RegionMaps { //! Returns the narrowest scope that encloses `id`, if any. match self.scope_map.borrow().get(&id) { Some(&r) => r, - None => { panic!("no enclosing scope for id {}", id); } + None => { panic!("no enclosing scope for id {:?}", id); } } } @@ -194,7 +194,7 @@ impl RegionMaps { pub fn var_scope(&self, var_id: ast::NodeId) -> CodeExtent { match self.var_map.borrow().get(&var_id) { Some(&r) => r, - None => { panic!("no enclosing scope for id {}", var_id); } + None => { panic!("no enclosing scope for id {:?}", var_id); } } } @@ -204,7 +204,7 @@ impl RegionMaps { // check for a designated rvalue scope match self.rvalue_scopes.borrow().get(&expr_id) { Some(&s) => { - debug!("temporary_scope({}) = {} [custom]", expr_id, s); + debug!("temporary_scope({:?}) = {:?} [custom]", expr_id, s); return Some(s); } None => { } @@ -225,12 +225,12 @@ impl RegionMaps { id = p; } None => { - debug!("temporary_scope({}) = None", expr_id); + debug!("temporary_scope({:?}) = None", expr_id); return None; } } } - debug!("temporary_scope({}) = {} [enclosing]", expr_id, id); + debug!("temporary_scope({:?}) = {:?} [enclosing]", expr_id, id); return Some(id); } @@ -238,7 +238,7 @@ impl RegionMaps { //! Returns the lifetime of the variable `id`. let scope = ty::ReScope(self.var_scope(id)); - debug!("var_region({}) = {}", id, scope); + debug!("var_region({:?}) = {:?}", id, scope); scope } @@ -258,7 +258,7 @@ impl RegionMaps { while superscope != s { match self.scope_map.borrow().get(&s) { None => { - debug!("is_subscope_of({}, {}, s={})=false", + debug!("is_subscope_of({:?}, {:?}, s={:?})=false", subscope, superscope, s); return false; @@ -267,7 +267,7 @@ impl RegionMaps { } } - debug!("is_subscope_of({}, {})=true", + debug!("is_subscope_of({:?}, {:?})=true", subscope, superscope); return true; @@ -287,7 +287,7 @@ impl RegionMaps { sub_region: ty::Region, super_region: ty::Region) -> bool { - debug!("is_subregion_of(sub_region={}, super_region={})", + debug!("is_subregion_of(sub_region={:?}, super_region={:?})", sub_region, super_region); sub_region == super_region || { @@ -365,7 +365,7 @@ impl RegionMaps { fn ancestors_of(this: &RegionMaps, scope: CodeExtent) -> Vec { - // debug!("ancestors_of(scope={})", scope); + // debug!("ancestors_of(scope={:?})", scope); let mut result = vec!(scope); let mut scope = scope; loop { @@ -376,7 +376,7 @@ impl RegionMaps { scope = superscope; } } - // debug!("ancestors_of_loop(scope={})", scope); + // debug!("ancestors_of_loop(scope={:?})", scope); } } } @@ -414,7 +414,7 @@ fn record_var_lifetime(visitor: &mut RegionResolutionVisitor, } fn resolve_block(visitor: &mut RegionResolutionVisitor, blk: &ast::Block) { - debug!("resolve_block(blk.id={})", blk.id); + debug!("resolve_block(blk.id={:?})", blk.id); // Record the parent of this block. 
record_superlifetime(visitor, blk.id, blk.span); @@ -466,7 +466,7 @@ fn resolve_pat(visitor: &mut RegionResolutionVisitor, pat: &ast::Pat) { fn resolve_stmt(visitor: &mut RegionResolutionVisitor, stmt: &ast::Stmt) { let stmt_id = stmt_id(stmt); - debug!("resolve_stmt(stmt.id={})", stmt_id); + debug!("resolve_stmt(stmt.id={:?})", stmt_id); let stmt_scope = CodeExtent::from_node_id(stmt_id); visitor.region_maps.mark_as_terminating_scope(stmt_scope); @@ -479,7 +479,7 @@ fn resolve_stmt(visitor: &mut RegionResolutionVisitor, stmt: &ast::Stmt) { } fn resolve_expr(visitor: &mut RegionResolutionVisitor, expr: &ast::Expr) { - debug!("resolve_expr(expr.id={})", expr.id); + debug!("resolve_expr(expr.id={:?})", expr.id); record_superlifetime(visitor, expr.id, expr.span); @@ -566,7 +566,7 @@ fn resolve_expr(visitor: &mut RegionResolutionVisitor, expr: &ast::Expr) { } fn resolve_local(visitor: &mut RegionResolutionVisitor, local: &ast::Local) { - debug!("resolve_local(local.id={},local.init={})", + debug!("resolve_local(local.id={:?},local.init={:?})", local.id,local.init.is_some()); let blk_id = match visitor.cx.var_parent { @@ -643,7 +643,7 @@ fn resolve_local(visitor: &mut RegionResolutionVisitor, local: &ast::Local) { // A, but the inner rvalues `a()` and `b()` have an extended lifetime // due to rule C. // - // FIXME(#6308) -- Note that `[]` patterns work more smoothly post-DST. + // FIXME(#6308) -- Note that `.index(&FullRange)` patterns work more smoothly post-DST. match local.init { Some(ref expr) => { @@ -815,10 +815,10 @@ fn resolve_fn(visitor: &mut RegionResolutionVisitor, body: &ast::Block, sp: Span, id: ast::NodeId) { - debug!("region::resolve_fn(id={}, \ - span={}, \ - body.id={}, \ - cx.parent={})", + debug!("region::resolve_fn(id={:?}, \ + span={:?}, \ + body.id={:?}, \ + cx.parent={:?})", id, visitor.sess.codemap().span_to_string(sp), body.id, diff --git a/src/librustc/middle/resolve_lifetime.rs b/src/librustc/middle/resolve_lifetime.rs index 68cb8ca39b492..8e03d774b8124 100644 --- a/src/librustc/middle/resolve_lifetime.rs +++ b/src/librustc/middle/resolve_lifetime.rs @@ -223,7 +223,7 @@ impl<'a, 'v> Visitor<'v> for LifetimeContext<'a> { fn visit_poly_trait_ref(&mut self, trait_ref: &ast::PolyTraitRef, _modifier: &ast::TraitBoundModifier) { - debug!("visit_poly_trait_ref trait_ref={}", trait_ref); + debug!("visit_poly_trait_ref trait_ref={:?}", trait_ref); self.with(LateScope(&trait_ref.bound_lifetimes, self.scope), |old_scope, this| { this.check_lifetime_defs(old_scope, &trait_ref.bound_lifetimes); @@ -250,9 +250,9 @@ impl<'a> LifetimeContext<'a> { scope: &wrap_scope, def_map: self.def_map, }; - debug!("entering scope {}", this.scope); + debug!("entering scope {:?}", this.scope); f(self.scope, &mut this); - debug!("exiting scope {}", this.scope); + debug!("exiting scope {:?}", this.scope); } /// Visits self by adding a scope and handling recursive walk over the contents with `walk`. 
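// [Editor's note: illustrative aside, not part of the patch.]
// Most hunks in this file only swap `{}` for `{:?}`. After the formatting
// split, `{}` resolves to the user-facing trait (then `fmt::String`, which
// became `Display`) while `{:?}` resolves to the derivable debugging trait
// (then `fmt::Show`, which became `Debug`); types such as `CodeExtent` or
// `ScopeChain` only provide the debug-style impl, so their format arguments
// have to become `{:?}`. A minimal sketch in today's Rust, with a made-up
// `ScopeId` type standing in for those compiler-internal types:

use std::fmt;

#[derive(Debug)] // debug formatting comes for free
struct ScopeId(u32);

// User-facing formatting is opt-in and hand-written.
impl fmt::Display for ScopeId {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "scope #{}", self.0)
    }
}

fn main() {
    let s = ScopeId(7);
    println!("{:?}", s); // Debug   -> ScopeId(7)
    println!("{}", s);   // Display -> scope #7
}
// [End of editor's note.]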
@@ -281,7 +281,7 @@ impl<'a> LifetimeContext<'a> { { let referenced_idents = early_bound_lifetime_names(generics); - debug!("visit_early_late: referenced_idents={}", + debug!("visit_early_late: referenced_idents={:?}", referenced_idents); let (early, late): (Vec<_>, _) = generics.lifetimes.iter().cloned().partition( @@ -399,7 +399,7 @@ impl<'a> LifetimeContext<'a> { self.sess.span_err( lifetime_ref.span, format!("use of undeclared lifetime name `{}`", - token::get_name(lifetime_ref.name))[]); + token::get_name(lifetime_ref.name)).index(&FullRange)); } fn check_lifetime_defs(&mut self, old_scope: Scope, lifetimes: &Vec) { @@ -413,7 +413,7 @@ impl<'a> LifetimeContext<'a> { lifetime.lifetime.span, format!("illegal lifetime parameter name: `{}`", token::get_name(lifetime.lifetime.name)) - []); + .index(&FullRange)); } } @@ -427,7 +427,7 @@ impl<'a> LifetimeContext<'a> { format!("lifetime name `{}` declared twice in \ the same scope", token::get_name(lifetime_j.lifetime.name)) - []); + .index(&FullRange)); } } @@ -488,7 +488,7 @@ impl<'a> LifetimeContext<'a> { probably a bug in syntax::fold"); } - debug!("lifetime_ref={} id={} resolved to {}", + debug!("lifetime_ref={:?} id={:?} resolved to {:?}", lifetime_to_string(lifetime_ref), lifetime_ref.id, def); @@ -605,9 +605,9 @@ fn early_bound_lifetime_names(generics: &ast::Generics) -> Vec { impl<'a> fmt::Show for ScopeChain<'a> { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { match *self { - EarlyScope(space, defs, _) => write!(fmt, "EarlyScope({}, {})", space, defs), - LateScope(defs, _) => write!(fmt, "LateScope({})", defs), - BlockScope(id, _) => write!(fmt, "BlockScope({})", id), + EarlyScope(space, defs, _) => write!(fmt, "EarlyScope({:?}, {:?})", space, defs), + LateScope(defs, _) => write!(fmt, "LateScope({:?})", defs), + BlockScope(id, _) => write!(fmt, "BlockScope({:?})", id), RootScope => write!(fmt, "RootScope"), } } diff --git a/src/librustc/middle/stability.rs b/src/librustc/middle/stability.rs index 362d5fedaa376..359ad8d394129 100644 --- a/src/librustc/middle/stability.rs +++ b/src/librustc/middle/stability.rs @@ -161,7 +161,7 @@ pub fn lookup(tcx: &ty::ctxt, id: DefId) -> Option { // is this definition the implementation of a trait method? match ty::trait_item_of_item(tcx, id) { Some(ty::MethodTraitItemId(trait_method_id)) if trait_method_id != id => { - debug!("lookup: trait_method_id={}", trait_method_id); + debug!("lookup: trait_method_id={:?}", trait_method_id); return lookup(tcx, trait_method_id) } _ => {} @@ -182,7 +182,7 @@ pub fn lookup(tcx: &ty::ctxt, id: DefId) -> Option { // stability of the trait to determine the stability of any // unmarked impls for it. See FIXME above for more details. 
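// [Editor's note: illustrative aside, not part of the patch.]
// The comments just above describe a fallback chain for stability lookup: an
// item's own attribute wins; an impl method with no attribute inherits from
// the trait method it implements; an unmarked impl falls back to its trait.
// A simplified, self-contained model of that chain -- `Item`, `lookup` and
// the index-based "parent" link are hypothetical stand-ins, not the real
// `middle::stability` API.

#[allow(dead_code)]
#[derive(Clone, Copy, Debug, PartialEq)]
enum Stability { Stable, Unstable }

struct Item {
    own: Option<Stability>, // the item's own stability attribute, if any
    parent: Option<usize>,  // trait item / trait to inherit from, if any
}

fn lookup(items: &[Item], id: usize) -> Option<Stability> {
    let item = &items[id];
    item.own.or_else(|| item.parent.and_then(|p| lookup(items, p)))
}

fn main() {
    // items[0]: a trait marked stable; items[1]: an unmarked impl method.
    let items = [
        Item { own: Some(Stability::Stable), parent: None },
        Item { own: None, parent: Some(0) },
    ];
    assert_eq!(lookup(&items, 0), Some(Stability::Stable));
    assert_eq!(lookup(&items, 1), Some(Stability::Stable));
    println!("{:?}", lookup(&items, 1));
}
// [End of editor's note.]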
- debug!("lookup: trait_id={}", trait_id); + debug!("lookup: trait_id={:?}", trait_id); lookup(tcx, trait_id) } else { None diff --git a/src/librustc/middle/subst.rs b/src/librustc/middle/subst.rs index cd29ce28ac174..98bb0645befd1 100644 --- a/src/librustc/middle/subst.rs +++ b/src/librustc/middle/subst.rs @@ -242,7 +242,7 @@ impl fmt::Show for VecPerParamSpace { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { try!(write!(fmt, "VecPerParamSpace {{")); for space in ParamSpace::all().iter() { - try!(write!(fmt, "{}: {}, ", *space, self.get_slice(*space))); + try!(write!(fmt, "{:?}: {:?}, ", *space, self.get_slice(*space))); } try!(write!(fmt, "}}")); Ok(()) @@ -601,10 +601,10 @@ impl<'a, 'tcx> TypeFolder<'tcx> for SubstFolder<'a, 'tcx> { span, format!("Type parameter out of range \ when substituting in region {} (root type={}) \ - (space={}, index={})", + (space={:?}, index={})", region_name.as_str(), self.root_ty.repr(self.tcx()), - space, i)[]); + space, i).index(&FullRange)); } } } @@ -654,14 +654,14 @@ impl<'a,'tcx> SubstFolder<'a,'tcx> { let span = self.span.unwrap_or(DUMMY_SP); self.tcx().sess.span_bug( span, - format!("Type parameter `{}` ({}/{}/{}) out of range \ + format!("Type parameter `{}` ({}/{:?}/{}) out of range \ when substituting (root type={}) substs={}", p.repr(self.tcx()), source_ty.repr(self.tcx()), p.space, p.idx, self.root_ty.repr(self.tcx()), - self.substs.repr(self.tcx()))[]); + self.substs.repr(self.tcx())).index(&FullRange)); } }; @@ -711,7 +711,7 @@ impl<'a,'tcx> SubstFolder<'a,'tcx> { /// first case we do not increase the Debruijn index and in the second case we do. The reason /// is that only in the second case have we passed through a fn binder. fn shift_regions_through_binders(&self, ty: Ty<'tcx>) -> Ty<'tcx> { - debug!("shift_regions(ty={}, region_binders_passed={}, type_has_escaping_regions={})", + debug!("shift_regions(ty={:?}, region_binders_passed={:?}, type_has_escaping_regions={:?})", ty.repr(self.tcx()), self.region_binders_passed, ty::type_has_escaping_regions(ty)); if self.region_binders_passed == 0 || !ty::type_has_escaping_regions(ty) { @@ -719,7 +719,7 @@ impl<'a,'tcx> SubstFolder<'a,'tcx> { } let result = ty_fold::shift_regions(self.tcx(), self.region_binders_passed, &ty); - debug!("shift_regions: shifted result = {}", result.repr(self.tcx())); + debug!("shift_regions: shifted result = {:?}", result.repr(self.tcx())); result } diff --git a/src/librustc/middle/traits/coherence.rs b/src/librustc/middle/traits/coherence.rs index 42b6e54420b40..49c7d6aafaa5e 100644 --- a/src/librustc/middle/traits/coherence.rs +++ b/src/librustc/middle/traits/coherence.rs @@ -137,7 +137,7 @@ fn ty_is_local_constructor<'tcx>(tcx: &ty::ctxt<'tcx>, ty: Ty<'tcx>) -> bool { ty::ty_err => { tcx.sess.bug( format!("ty_is_local invoked on unexpected type: {}", - ty.repr(tcx))[]) + ty.repr(tcx)).index(&FullRange)) } } } diff --git a/src/librustc/middle/traits/error_reporting.rs b/src/librustc/middle/traits/error_reporting.rs index 59322fcc632e8..fd6773afb765d 100644 --- a/src/librustc/middle/traits/error_reporting.rs +++ b/src/librustc/middle/traits/error_reporting.rs @@ -339,5 +339,5 @@ pub fn suggest_new_overflow_limit(tcx: &ty::ctxt, span: Span) { span, format!( "consider adding a `#![recursion_limit=\"{}\"]` attribute to your crate", - suggested_limit)[]); + suggested_limit).index(&FullRange)); } diff --git a/src/librustc/middle/traits/fulfill.rs b/src/librustc/middle/traits/fulfill.rs index c2327adece8e0..71a3ad64faf9b 100644 --- 
a/src/librustc/middle/traits/fulfill.rs +++ b/src/librustc/middle/traits/fulfill.rs @@ -227,7 +227,7 @@ impl<'tcx> FulfillmentContext<'tcx> { } pub fn pending_obligations(&self) -> &[PredicateObligation<'tcx>] { - self.predicates[] + self.predicates.index(&FullRange) } /// Attempts to select obligations using `selcx`. If `only_new_obligations` is true, then it @@ -437,11 +437,9 @@ fn register_region_obligation<'tcx>(tcx: &ty::ctxt<'tcx>, debug!("register_region_obligation({})", region_obligation.repr(tcx)); - let body_id = region_obligation.cause.body_id; - match region_obligations.entry(&body_id) { + match region_obligations.entry(region_obligation.cause.body_id) { Vacant(entry) => { entry.insert(vec![region_obligation]); }, Occupied(mut entry) => { entry.get_mut().push(region_obligation); }, } } - diff --git a/src/librustc/middle/traits/mod.rs b/src/librustc/middle/traits/mod.rs index ce926fd8d10de..3ef6694ce31c7 100644 --- a/src/librustc/middle/traits/mod.rs +++ b/src/librustc/middle/traits/mod.rs @@ -297,7 +297,7 @@ pub fn evaluate_builtin_bound<'a,'tcx>(infcx: &InferCtxt<'a,'tcx>, span: Span) -> SelectionResult<'tcx, ()> { - debug!("type_known_to_meet_builtin_bound(ty={}, bound={})", + debug!("type_known_to_meet_builtin_bound(ty={}, bound={:?})", ty.repr(infcx.tcx), bound); @@ -347,7 +347,7 @@ pub fn evaluate_builtin_bound<'a,'tcx>(infcx: &InferCtxt<'a,'tcx>, } }; - debug!("type_known_to_meet_builtin_bound: ty={} bound={} result={}", + debug!("type_known_to_meet_builtin_bound: ty={} bound={:?} result={:?}", ty.repr(infcx.tcx), bound, result); @@ -378,7 +378,7 @@ pub fn type_known_to_meet_builtin_bound<'a,'tcx>(infcx: &InferCtxt<'a,'tcx>, span, format!("overflow evaluating whether `{}` is `{}`", ty.user_string(infcx.tcx), - bound.user_string(infcx.tcx))[]); + bound.user_string(infcx.tcx)).as_slice()); suggest_new_overflow_limit(infcx.tcx, span); false } diff --git a/src/librustc/middle/traits/object_safety.rs b/src/librustc/middle/traits/object_safety.rs index 8880cb7ce733f..beb3340e3c44a 100644 --- a/src/librustc/middle/traits/object_safety.rs +++ b/src/librustc/middle/traits/object_safety.rs @@ -178,7 +178,7 @@ fn object_safety_violations_for_method<'tcx>(tcx: &ty::ctxt<'tcx>, // The `Self` type is erased, so it should not appear in list of // arguments or return type apart from the receiver. 
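// [Editor's note: illustrative aside, not part of the patch.]
// The `.index(&FullRange)` and `.index(&(1..))` calls that replace `expr[]`
// and `expr[1..]` throughout this patch are the explicit spelling of range
// indexing: the dedicated slicing sugar (and the `slice`/`slice_mut` lang
// items dropped in the lang_items.rs hunk above) is folded into the ordinary
// `ops::Index` path. At this point in the language's history `Index::index`
// took its argument by reference, hence the extra `&`. A sketch of the same
// desugaring with today's by-value `Index`:

use std::ops::Index;

fn main() {
    let sig_inputs = vec!["self", "a", "b"];

    // sugar:            &sig_inputs[1..]
    // explicit, today:  Index::index(&sig_inputs, 1..)
    // explicit, then:   sig_inputs.index(&(1..))
    let args: &[&str] = Index::index(&sig_inputs, 1..);

    assert_eq!(args, ["a", "b"]);
    println!("{:?}", args);
}
// [End of editor's note.]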
let ref sig = method.fty.sig; - for &input_ty in sig.0.inputs[1..].iter() { + for &input_ty in sig.0.inputs.index(&(1..)).iter() { if contains_illegal_self_type_reference(tcx, trait_def_id, input_ty) { return Some(MethodViolationCode::ReferencesSelf); } @@ -295,7 +295,7 @@ impl<'tcx> Repr<'tcx> for ObjectSafetyViolation<'tcx> { ObjectSafetyViolation::SizedSelf => format!("SizedSelf"), ObjectSafetyViolation::Method(ref m, code) => - format!("Method({},{})", m.repr(tcx), code), + format!("Method({},{:?})", m.repr(tcx), code), } } } diff --git a/src/librustc/middle/traits/project.rs b/src/librustc/middle/traits/project.rs index 65f7ad296db51..67a8508e60dd1 100644 --- a/src/librustc/middle/traits/project.rs +++ b/src/librustc/middle/traits/project.rs @@ -490,7 +490,7 @@ fn assemble_candidates_from_object_type<'cx,'tcx>( selcx.tcx().sess.span_bug( obligation.cause.span, format!("assemble_candidates_from_object_type called with non-object: {}", - object_ty.repr(selcx.tcx()))[]); + object_ty.repr(selcx.tcx())).as_slice()); } }; let projection_bounds = data.projection_bounds_with_self_ty(selcx.tcx(), object_ty); diff --git a/src/librustc/middle/traits/select.rs b/src/librustc/middle/traits/select.rs index 2393b7d733d00..25a33de1cc7f0 100644 --- a/src/librustc/middle/traits/select.rs +++ b/src/librustc/middle/traits/select.rs @@ -368,7 +368,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { let result = self.evaluate_stack(&stack); - debug!("result: {}", result); + debug!("result: {:?}", result); result } @@ -903,7 +903,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { let all_bounds = util::transitive_bounds( - self.tcx(), caller_trait_refs[]); + self.tcx(), caller_trait_refs.index(&FullRange)); let matching_bounds = all_bounds.filter( @@ -944,14 +944,14 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { _ => { return Ok(()); } }; - debug!("assemble_unboxed_candidates: self_ty={} kind={} obligation={}", + debug!("assemble_unboxed_candidates: self_ty={} kind={:?} obligation={}", self_ty.repr(self.tcx()), kind, obligation.repr(self.tcx())); let closure_kind = self.closure_typer.unboxed_closure_kind(closure_def_id); - debug!("closure_kind = {}", closure_kind); + debug!("closure_kind = {:?}", closure_kind); if closure_kind == kind { candidates.vec.push(UnboxedClosureCandidate(closure_def_id, substs.clone())); @@ -1102,7 +1102,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { Err(error) => EvaluatedToErr(error), } }); - debug!("winnow_candidate depth={} result={}", + debug!("winnow_candidate depth={} result={:?}", stack.obligation.recursion_depth, result); result } @@ -1467,7 +1467,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { self.tcx().sess.bug( format!( "asked to assemble builtin bounds of unexpected type: {}", - self_ty.repr(self.tcx()))[]); + self_ty.repr(self.tcx())).index(&FullRange)); } }; @@ -1637,7 +1637,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { self.tcx().sess.span_bug( obligation.cause.span, format!("builtin bound for {} was ambig", - obligation.repr(self.tcx()))[]); + obligation.repr(self.tcx())).index(&FullRange)); } } } @@ -1716,7 +1716,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { let substs = self.rematch_impl(impl_def_id, obligation, snapshot, &skol_map, skol_obligation_trait_ref.trait_ref); - debug!("confirm_impl_candidate substs={}", substs); + debug!("confirm_impl_candidate substs={:?}", substs); Ok(self.vtable_impl(impl_def_id, substs, obligation.cause.clone(), obligation.recursion_depth + 1, skol_map, snapshot)) }) @@ -1816,7 +1816,7 @@ impl<'cx, 'tcx> 
SelectionContext<'cx, 'tcx> { self.tcx().sess.span_bug( obligation.cause.span, format!("Fn pointer candidate for inappropriate self type: {}", - self_ty.repr(self.tcx()))[]); + self_ty.repr(self.tcx())).index(&FullRange)); } }; @@ -1946,7 +1946,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { self.tcx().sess.bug( format!("Impl {} was matchable against {} but now is not", impl_def_id.repr(self.tcx()), - obligation.repr(self.tcx()))[]); + obligation.repr(self.tcx())).index(&FullRange)); } } } @@ -2225,7 +2225,7 @@ impl<'tcx> Repr<'tcx> for SelectionCandidate<'tcx> { fn repr(&self, tcx: &ty::ctxt<'tcx>) -> String { match *self { ErrorCandidate => format!("ErrorCandidate"), - BuiltinCandidate(b) => format!("BuiltinCandidate({})", b), + BuiltinCandidate(b) => format!("BuiltinCandidate({:?})", b), ParamCandidate(ref a) => format!("ParamCandidate({})", a.repr(tcx)), ImplCandidate(a) => format!("ImplCandidate({})", a.repr(tcx)), ProjectionCandidate => format!("ProjectionCandidate"), @@ -2234,7 +2234,7 @@ impl<'tcx> Repr<'tcx> for SelectionCandidate<'tcx> { format!("ObjectCandidate") } UnboxedClosureCandidate(c, ref s) => { - format!("UnboxedClosureCandidate({},{})", c, s.repr(tcx)) + format!("UnboxedClosureCandidate({:?},{})", c, s.repr(tcx)) } } } diff --git a/src/librustc/middle/traits/util.rs b/src/librustc/middle/traits/util.rs index 229d34fe4237c..272447027afd8 100644 --- a/src/librustc/middle/traits/util.rs +++ b/src/librustc/middle/traits/util.rs @@ -238,7 +238,7 @@ pub fn fresh_substs_for_impl<'a, 'tcx>(infcx: &InferCtxt<'a, 'tcx>, impl<'tcx, N> fmt::Show for VtableImplData<'tcx, N> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "VtableImpl({})", self.impl_def_id) + write!(f, "VtableImpl({:?})", self.impl_def_id) } } @@ -451,8 +451,8 @@ impl<'tcx> Repr<'tcx> for super::FulfillmentErrorCode<'tcx> { impl<'tcx> fmt::Show for super::FulfillmentErrorCode<'tcx> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { - super::CodeSelectionError(ref e) => write!(f, "{}", e), - super::CodeProjectionError(ref e) => write!(f, "{}", e), + super::CodeSelectionError(ref e) => write!(f, "{:?}", e), + super::CodeProjectionError(ref e) => write!(f, "{:?}", e), super::CodeAmbiguity => write!(f, "Ambiguity") } } diff --git a/src/librustc/middle/ty.rs b/src/librustc/middle/ty.rs index c359233eca173..90716844fbe80 100644 --- a/src/librustc/middle/ty.rs +++ b/src/librustc/middle/ty.rs @@ -1047,13 +1047,17 @@ pub struct ClosureTy<'tcx> { pub abi: abi::Abi, } -#[derive(Clone, Copy, PartialEq, Eq, Hash)] +#[derive(Clone, Copy, PartialEq, Eq, Hash, Show)] pub enum FnOutput<'tcx> { FnConverging(Ty<'tcx>), FnDiverging } impl<'tcx> FnOutput<'tcx> { + pub fn diverges(&self) -> bool { + *self == FnDiverging + } + pub fn unwrap(self) -> Ty<'tcx> { match self { ty::FnConverging(t) => t, @@ -1062,6 +1066,14 @@ impl<'tcx> FnOutput<'tcx> { } } +pub type PolyFnOutput<'tcx> = Binder>; + +impl<'tcx> PolyFnOutput<'tcx> { + pub fn diverges(&self) -> bool { + self.0.diverges() + } +} + /// Signature of a function type, which I have arbitrarily /// decided to use to refer to the input/output types. 
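// [Editor's note: illustrative aside, not part of the patch.]
// The ty.rs hunks around here introduce `PolyFnOutput`/`PolyFnSig` helpers so
// that callers read a signature's pieces *through* its binder: `output()`
// hands back another `Binder<_>` rather than silently discarding a binding
// level, and `diverges()` replaces ad-hoc comparisons against `FnDiverging`
// (see the liveness hunks above). A toy model of that accessor shape, using
// simplified stand-ins for the rustc types:

#[derive(Clone, Debug, PartialEq)]
enum FnOutput {
    Converging(String), // the return type, named by a string here
    Diverging,          // the `!` return type
}

impl FnOutput {
    fn diverges(&self) -> bool {
        *self == FnOutput::Diverging
    }
}

#[derive(Clone, Debug)]
struct FnSig {
    inputs: Vec<String>,
    output: FnOutput,
}

// Models ty::Binder<T>; the real one tracks late-bound regions.
#[derive(Clone, Debug)]
struct Binder<T>(T);

type PolyFnSig = Binder<FnSig>;

impl PolyFnSig {
    // Accessors re-wrap their result, so "still under a binder" stays
    // visible in the types.
    fn output(&self) -> Binder<FnOutput> {
        Binder(self.0.output.clone())
    }
    fn diverges(&self) -> bool {
        self.0.output.diverges()
    }
}

fn main() {
    let sig: PolyFnSig = Binder(FnSig {
        inputs: vec!["i32".to_string()],
        output: FnOutput::Diverging,
    });
    assert_eq!(sig.0.inputs.len(), 1);
    assert!(sig.output().0.diverges());
    assert!(sig.diverges());
    println!("{:?}", FnOutput::Converging("u8".to_string()));
}
// [End of editor's note.]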
/// @@ -1077,6 +1089,21 @@ pub struct FnSig<'tcx> { pub type PolyFnSig<'tcx> = Binder>; +impl<'tcx> PolyFnSig<'tcx> { + pub fn inputs(&self) -> ty::Binder>> { + ty::Binder(self.0.inputs.clone()) + } + pub fn input(&self, index: uint) -> ty::Binder> { + ty::Binder(self.0.inputs[index]) + } + pub fn output(&self) -> ty::Binder> { + ty::Binder(self.0.output.clone()) + } + pub fn variadic(&self) -> bool { + self.0.variadic + } +} + #[derive(Clone, Copy, PartialEq, Eq, Hash, Show)] pub struct ParamTy { pub space: subst::ParamSpace, @@ -1699,8 +1726,7 @@ impl fmt::Show for RegionVid { impl<'tcx> fmt::Show for FnSig<'tcx> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - // grr, without tcx not much we can do. - write!(f, "(...)") + write!(f, "({:?}; variadic: {})->{:?}", self.inputs, self.variadic, self.output) } } @@ -1710,8 +1736,8 @@ impl fmt::Show for InferTy { TyVar(ref v) => v.fmt(f), IntVar(ref v) => v.fmt(f), FloatVar(ref v) => v.fmt(f), - FreshTy(v) => write!(f, "FreshTy({})", v), - FreshIntTy(v) => write!(f, "FreshIntTy({})", v), + FreshTy(v) => write!(f, "FreshTy({:?})", v), + FreshIntTy(v) => write!(f, "FreshIntTy({:?})", v), } } } @@ -2006,8 +2032,8 @@ impl<'tcx> Predicate<'tcx> { /// struct Foo> { ... } /// /// Here, the `Generics` for `Foo` would contain a list of bounds like -/// `[[], [U:Bar]]`. Now if there were some particular reference -/// like `Foo`, then the `GenericBounds` would be `[[], +/// `[.index(&FullRange), [U:Bar]]`. Now if there were some particular reference +/// like `Foo`, then the `GenericBounds` would be `[.index(&FullRange), /// [uint:Bar]]`. #[derive(Clone, Show)] pub struct GenericBounds<'tcx> { @@ -2184,7 +2210,7 @@ impl<'a, 'tcx> ParameterEnvironment<'a, 'tcx> { _ => { cx.sess.bug(format!("ParameterEnvironment::from_item(): \ `{}` is not an item", - cx.map.node_to_string(id))[]) + cx.map.node_to_string(id)).index(&FullRange)) } } } @@ -2269,7 +2295,7 @@ impl UnboxedClosureKind { }; match result { Ok(trait_did) => trait_did, - Err(err) => cx.sess.fatal(err[]), + Err(err) => cx.sess.fatal(err.index(&FullRange)), } } } @@ -2302,12 +2328,12 @@ impl<'tcx> CommonTypes<'tcx> { bool: intern_ty(arena, interner, ty_bool), char: intern_ty(arena, interner, ty_char), err: intern_ty(arena, interner, ty_err), - int: intern_ty(arena, interner, ty_int(ast::TyI)), + int: intern_ty(arena, interner, ty_int(ast::TyIs)), i8: intern_ty(arena, interner, ty_int(ast::TyI8)), i16: intern_ty(arena, interner, ty_int(ast::TyI16)), i32: intern_ty(arena, interner, ty_int(ast::TyI32)), i64: intern_ty(arena, interner, ty_int(ast::TyI64)), - uint: intern_ty(arena, interner, ty_uint(ast::TyU)), + uint: intern_ty(arena, interner, ty_uint(ast::TyUs)), u8: intern_ty(arena, interner, ty_uint(ast::TyU8)), u16: intern_ty(arena, interner, ty_uint(ast::TyU16)), u32: intern_ty(arena, interner, ty_uint(ast::TyU32)), @@ -2471,7 +2497,7 @@ fn intern_ty<'tcx>(type_arena: &'tcx TypedArena>, region_depth: flags.depth, }); - debug!("Interned type: {} Pointer: {}", + debug!("Interned type: {:?} Pointer: {:?}", ty, ty as *const _); interner.insert(InternedTy { ty: ty }, ty); @@ -2590,7 +2616,7 @@ impl FlagComputation { } &ty_tup(ref ts) => { - self.add_tys(ts[]); + self.add_tys(ts.index(&FullRange)); } &ty_bare_fn(_, ref f) => { @@ -2613,7 +2639,7 @@ impl FlagComputation { fn add_fn_sig(&mut self, fn_sig: &PolyFnSig) { let mut computation = FlagComputation::new(); - computation.add_tys(fn_sig.0.inputs[]); + computation.add_tys(fn_sig.0.inputs.index(&FullRange)); if let ty::FnConverging(output) = 
fn_sig.0.output { computation.add_ty(output); @@ -2653,7 +2679,7 @@ impl FlagComputation { pub fn mk_mach_int<'tcx>(tcx: &ctxt<'tcx>, tm: ast::IntTy) -> Ty<'tcx> { match tm { - ast::TyI => tcx.types.int, + ast::TyIs => tcx.types.int, ast::TyI8 => tcx.types.i8, ast::TyI16 => tcx.types.i16, ast::TyI32 => tcx.types.i32, @@ -2663,7 +2689,7 @@ pub fn mk_mach_int<'tcx>(tcx: &ctxt<'tcx>, tm: ast::IntTy) -> Ty<'tcx> { pub fn mk_mach_uint<'tcx>(tcx: &ctxt<'tcx>, tm: ast::UintTy) -> Ty<'tcx> { match tm { - ast::TyU => tcx.types.uint, + ast::TyUs => tcx.types.uint, ast::TyU8 => tcx.types.u8, ast::TyU16 => tcx.types.u16, ast::TyU32 => tcx.types.u32, @@ -2782,7 +2808,7 @@ pub fn mk_trait<'tcx>(cx: &ctxt<'tcx>, fn bound_list_is_sorted(bounds: &[ty::PolyProjectionPredicate]) -> bool { bounds.len() == 0 || - bounds[1..].iter().enumerate().all( + bounds.index(&(1..)).iter().enumerate().all( |(index, bound)| bounds[index].sort_key() <= bound.sort_key()) } @@ -3037,7 +3063,7 @@ pub fn sequence_element_type<'tcx>(cx: &ctxt<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> { ty_str => mk_mach_uint(cx, ast::TyU8), ty_open(ty) => sequence_element_type(cx, ty), _ => cx.sess.bug(format!("sequence_element_type called on non-sequence value: {}", - ty_to_string(cx, ty))[]), + ty_to_string(cx, ty)).index(&FullRange)), } } @@ -3324,7 +3350,7 @@ pub fn type_contents<'tcx>(cx: &ctxt<'tcx>, ty: Ty<'tcx>) -> TypeContents { let result = match ty.sty { // uint and int are ffi-unsafe - ty_uint(ast::TyU) | ty_int(ast::TyI) => { + ty_uint(ast::TyUs) | ty_int(ast::TyIs) => { TC::ReachesFfiUnsafe } @@ -3371,7 +3397,7 @@ pub fn type_contents<'tcx>(cx: &ctxt<'tcx>, ty: Ty<'tcx>) -> TypeContents { ty_struct(did, substs) => { let flds = struct_fields(cx, did, substs); let mut res = - TypeContents::union(flds[], + TypeContents::union(flds.index(&FullRange), |f| tc_mt(cx, f.mt, cache)); if !lookup_repr_hints(cx, did).contains(&attr::ReprExtern) { @@ -3395,15 +3421,15 @@ pub fn type_contents<'tcx>(cx: &ctxt<'tcx>, ty: Ty<'tcx>) -> TypeContents { } ty_tup(ref tys) => { - TypeContents::union(tys[], + TypeContents::union(tys.index(&FullRange), |ty| tc_ty(cx, *ty, cache)) } ty_enum(did, substs) => { let variants = substd_enum_variants(cx, did, substs); let mut res = - TypeContents::union(variants[], |variant| { - TypeContents::union(variant.args[], + TypeContents::union(variants.index(&FullRange), |variant| { + TypeContents::union(variant.args.index(&FullRange), |arg_ty| { tc_ty(cx, *arg_ty, cache) }) @@ -3533,7 +3559,7 @@ fn type_impls_bound<'a,'tcx>(param_env: &ParameterEnvironment<'a,'tcx>, match cache.borrow().get(&ty) { None => {} Some(&result) => { - debug!("type_impls_bound({}, {}) = {} (cached)", + debug!("type_impls_bound({}, {:?}) = {:?} (cached)", ty.repr(param_env.tcx), bound, result); @@ -3546,7 +3572,7 @@ fn type_impls_bound<'a,'tcx>(param_env: &ParameterEnvironment<'a,'tcx>, let is_impld = traits::type_known_to_meet_builtin_bound(&infcx, param_env, ty, bound, span); - debug!("type_impls_bound({}, {}) = {}", + debug!("type_impls_bound({}, {:?}) = {:?}", ty.repr(param_env.tcx), bound, is_impld); @@ -3585,13 +3611,13 @@ pub fn is_ffi_safe<'tcx>(cx: &ctxt<'tcx>, ty: Ty<'tcx>) -> bool { pub fn is_instantiable<'tcx>(cx: &ctxt<'tcx>, r_ty: Ty<'tcx>) -> bool { fn type_requires<'tcx>(cx: &ctxt<'tcx>, seen: &mut Vec, r_ty: Ty<'tcx>, ty: Ty<'tcx>) -> bool { - debug!("type_requires({}, {})?", + debug!("type_requires({:?}, {:?})?", ::util::ppaux::ty_to_string(cx, r_ty), ::util::ppaux::ty_to_string(cx, ty)); let r = r_ty == ty || subtypes_require(cx, seen, 
r_ty, ty); - debug!("type_requires({}, {})? {}", + debug!("type_requires({:?}, {:?})? {:?}", ::util::ppaux::ty_to_string(cx, r_ty), ::util::ppaux::ty_to_string(cx, ty), r); @@ -3600,7 +3626,7 @@ pub fn is_instantiable<'tcx>(cx: &ctxt<'tcx>, r_ty: Ty<'tcx>) -> bool { fn subtypes_require<'tcx>(cx: &ctxt<'tcx>, seen: &mut Vec, r_ty: Ty<'tcx>, ty: Ty<'tcx>) -> bool { - debug!("subtypes_require({}, {})?", + debug!("subtypes_require({:?}, {:?})?", ::util::ppaux::ty_to_string(cx, r_ty), ::util::ppaux::ty_to_string(cx, ty)); @@ -3655,7 +3681,8 @@ pub fn is_instantiable<'tcx>(cx: &ctxt<'tcx>, r_ty: Ty<'tcx>) -> bool { ty_unboxed_closure(..) => { // this check is run on type definitions, so we don't expect to see // inference by-products or unboxed closure types - cx.sess.bug(format!("requires check invoked on inapplicable type: {}", ty)[]) + cx.sess.bug(format!("requires check invoked on inapplicable type: {:?}", + ty).as_slice()) } ty_tup(ref ts) => { @@ -3680,7 +3707,7 @@ pub fn is_instantiable<'tcx>(cx: &ctxt<'tcx>, r_ty: Ty<'tcx>) -> bool { } }; - debug!("subtypes_require({}, {})? {}", + debug!("subtypes_require({:?}, {:?})? {:?}", ::util::ppaux::ty_to_string(cx, r_ty), ::util::ppaux::ty_to_string(cx, ty), r); @@ -3748,7 +3775,8 @@ pub fn is_type_representable<'tcx>(cx: &ctxt<'tcx>, sp: Span, ty: Ty<'tcx>) ty_unboxed_closure(..) => { // this check is run on type definitions, so we don't expect to see // unboxed closure types - cx.sess.bug(format!("requires check invoked on inapplicable type: {}", ty)[]) + cx.sess.bug(format!("requires check invoked on inapplicable type: {:?}", + ty).as_slice()) } _ => Representable, } @@ -3789,7 +3817,7 @@ pub fn is_type_representable<'tcx>(cx: &ctxt<'tcx>, sp: Span, ty: Ty<'tcx>) fn is_type_structurally_recursive<'tcx>(cx: &ctxt<'tcx>, sp: Span, seen: &mut Vec>, ty: Ty<'tcx>) -> Representability { - debug!("is_type_structurally_recursive: {}", + debug!("is_type_structurally_recursive: {:?}", ::util::ppaux::ty_to_string(cx, ty)); match ty.sty { @@ -3809,7 +3837,7 @@ pub fn is_type_representable<'tcx>(cx: &ctxt<'tcx>, sp: Span, ty: Ty<'tcx>) match iter.next() { Some(&seen_type) => { if same_struct_or_enum_def_id(seen_type, did) { - debug!("SelfRecursive: {} contains {}", + debug!("SelfRecursive: {:?} contains {:?}", ::util::ppaux::ty_to_string(cx, seen_type), ::util::ppaux::ty_to_string(cx, ty)); return SelfRecursive; @@ -3829,7 +3857,7 @@ pub fn is_type_representable<'tcx>(cx: &ctxt<'tcx>, sp: Span, ty: Ty<'tcx>) for &seen_type in iter { if same_type(ty, seen_type) { - debug!("ContainsRecursive: {} contains {}", + debug!("ContainsRecursive: {:?} contains {:?}", ::util::ppaux::ty_to_string(cx, seen_type), ::util::ppaux::ty_to_string(cx, ty)); return ContainsRecursive; @@ -3851,7 +3879,7 @@ pub fn is_type_representable<'tcx>(cx: &ctxt<'tcx>, sp: Span, ty: Ty<'tcx>) } } - debug!("is_type_representable: {}", + debug!("is_type_representable: {:?}", ::util::ppaux::ty_to_string(cx, ty)); // To avoid a stack overflow when checking an enum variant or struct that @@ -3859,7 +3887,7 @@ pub fn is_type_representable<'tcx>(cx: &ctxt<'tcx>, sp: Span, ty: Ty<'tcx>) // of seen types and check recursion for each of them (issues #3008, #3779). 
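// [Editor's note: illustrative aside, not part of the patch.]
// The comment above is the heart of the representability check: walk a
// type's contents depth-first while carrying a stack of definitions already
// seen; meeting the root definition again means the type is self-recursive,
// meeting any other stacked definition means the recursion goes through a
// different type (`ContainsRecursive`). A generic sketch of that traversal
// over a toy "type graph" -- the names below are hypothetical stand-ins, not
// the real `is_type_structurally_recursive`.

#[derive(Clone, Copy, PartialEq, Debug)]
enum Representability { Representable, SelfRecursive, ContainsRecursive }

// contents[i] lists the indices of the types mentioned in definition i.
fn check(contents: &[Vec<usize>], seen: &mut Vec<usize>, root: usize, ty: usize)
         -> Representability {
    if !seen.is_empty() && ty == root {
        return Representability::SelfRecursive;
    }
    if seen.contains(&ty) {
        return Representability::ContainsRecursive;
    }
    seen.push(ty);
    let mut result = Representability::Representable;
    for &inner in &contents[ty] {
        let r = check(contents, seen, root, inner);
        if r != Representability::Representable {
            result = r;
            break;
        }
    }
    seen.pop();
    result
}

fn main() {
    // type 0: struct Foo { next: Foo } -- self-recursive, infinitely sized
    // type 1: struct Bar { x: u32 }    -- fine; u32 modelled as "no contents"
    // type 2: struct Baz { foo: Foo }  -- recursion reached through type 0
    let contents = vec![vec![0], vec![], vec![0]];
    assert_eq!(check(&contents, &mut Vec::new(), 0, 0),
               Representability::SelfRecursive);
    assert_eq!(check(&contents, &mut Vec::new(), 1, 1),
               Representability::Representable);
    assert_eq!(check(&contents, &mut Vec::new(), 2, 2),
               Representability::ContainsRecursive);
    println!("ok");
}
// [End of editor's note.]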
let mut seen: Vec = Vec::new(); let r = is_type_structurally_recursive(cx, sp, &mut seen, ty); - debug!("is_type_representable: {} is {}", + debug!("is_type_representable: {:?} is {:?}", ::util::ppaux::ty_to_string(cx, ty), r); r } @@ -3896,7 +3924,7 @@ pub fn type_is_fresh(ty: Ty) -> bool { pub fn type_is_uint(ty: Ty) -> bool { match ty.sty { - ty_infer(IntVar(_)) | ty_uint(ast::TyU) => true, + ty_infer(IntVar(_)) | ty_uint(ast::TyUs) => true, _ => false } } @@ -3942,7 +3970,7 @@ pub fn type_is_signed(ty: Ty) -> bool { pub fn type_is_machine(ty: Ty) -> bool { match ty.sty { - ty_int(ast::TyI) | ty_uint(ast::TyU) => false, + ty_int(ast::TyIs) | ty_uint(ast::TyUs) => false, ty_int(..) | ty_uint(..) | ty_float(..) => true, _ => false } @@ -3986,7 +4014,7 @@ pub fn close_type<'tcx>(cx: &ctxt<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> { match ty.sty { ty_open(ty) => mk_rptr(cx, cx.mk_region(ReStatic), mt {ty: ty, mutbl:ast::MutImmutable}), _ => cx.sess.bug(format!("Trying to close a non-open type {}", - ty_to_string(cx, ty))[]) + ty_to_string(cx, ty)).index(&FullRange)) } } @@ -4087,7 +4115,7 @@ pub fn node_id_to_trait_ref<'tcx>(cx: &ctxt<'tcx>, id: ast::NodeId) Some(ty) => ty.clone(), None => cx.sess.bug( format!("node_id_to_trait_ref: no trait ref for node `{}`", - cx.map.node_to_string(id))[]) + cx.map.node_to_string(id)).index(&FullRange)) } } @@ -4100,7 +4128,7 @@ pub fn node_id_to_type<'tcx>(cx: &ctxt<'tcx>, id: ast::NodeId) -> Ty<'tcx> { Some(ty) => ty, None => cx.sess.bug( format!("node_id_to_type: no type for node `{}`", - cx.map.node_to_string(id))[]) + cx.map.node_to_string(id)).index(&FullRange)) } } @@ -4122,7 +4150,7 @@ pub fn fn_is_variadic(fty: Ty) -> bool { match fty.sty { ty_bare_fn(_, ref f) => f.sig.0.variadic, ref s => { - panic!("fn_is_variadic() called on non-fn type: {}", s) + panic!("fn_is_variadic() called on non-fn type: {:?}", s) } } } @@ -4131,7 +4159,7 @@ pub fn ty_fn_sig<'tcx>(fty: Ty<'tcx>) -> &'tcx PolyFnSig<'tcx> { match fty.sty { ty_bare_fn(_, ref f) => &f.sig, ref s => { - panic!("ty_fn_sig() called on non-fn type: {}", s) + panic!("ty_fn_sig() called on non-fn type: {:?}", s) } } } @@ -4145,8 +4173,8 @@ pub fn ty_fn_abi(fty: Ty) -> abi::Abi { } // Type accessors for substructures of types -pub fn ty_fn_args<'tcx>(fty: Ty<'tcx>) -> &'tcx [Ty<'tcx>] { - ty_fn_sig(fty).0.inputs.as_slice() +pub fn ty_fn_args<'tcx>(fty: Ty<'tcx>) -> ty::Binder>> { + ty_fn_sig(fty).inputs() } pub fn ty_closure_store(fty: Ty) -> TraitStore { @@ -4157,16 +4185,16 @@ pub fn ty_closure_store(fty: Ty) -> TraitStore { UniqTraitStore } ref s => { - panic!("ty_closure_store() called on non-closure type: {}", s) + panic!("ty_closure_store() called on non-closure type: {:?}", s) } } } -pub fn ty_fn_ret<'tcx>(fty: Ty<'tcx>) -> FnOutput<'tcx> { +pub fn ty_fn_ret<'tcx>(fty: Ty<'tcx>) -> Binder> { match fty.sty { - ty_bare_fn(_, ref f) => f.sig.0.output, + ty_bare_fn(_, ref f) => f.sig.output(), ref s => { - panic!("ty_fn_ret() called on non-fn type: {}", s) + panic!("ty_fn_ret() called on non-fn type: {:?}", s) } } } @@ -4186,8 +4214,8 @@ pub fn ty_region(tcx: &ctxt, ref s => { tcx.sess.span_bug( span, - format!("ty_region() invoked on an inappropriate ty: {}", - s)[]); + format!("ty_region() invoked on an inappropriate ty: {:?}", + s).index(&FullRange)); } } } @@ -4246,13 +4274,13 @@ pub fn expr_span(cx: &ctxt, id: NodeId) -> Span { e.span } Some(f) => { - cx.sess.bug(format!("Node id {} is not an expr: {}", + cx.sess.bug(format!("Node id {} is not an expr: {:?}", id, - f)[]); + f).index(&FullRange)); 
} None => { cx.sess.bug(format!("Node id {} is not present \ - in the node map", id)[]); + in the node map", id).index(&FullRange)); } } } @@ -4266,16 +4294,16 @@ pub fn local_var_name_str(cx: &ctxt, id: NodeId) -> InternedString { } _ => { cx.sess.bug( - format!("Variable id {} maps to {}, not local", + format!("Variable id {} maps to {:?}, not local", id, - pat)[]); + pat).index(&FullRange)); } } } r => { - cx.sess.bug(format!("Variable id {} maps to {}, not local", + cx.sess.bug(format!("Variable id {} maps to {:?}, not local", id, - r)[]); + r).index(&FullRange)); } } } @@ -4297,7 +4325,7 @@ pub fn adjust_ty<'tcx, F>(cx: &ctxt<'tcx>, return match adjustment { Some(adjustment) => { match *adjustment { - AdjustReifyFnPointer(_) => { + AdjustReifyFnPointer(_) => { match unadjusted_ty.sty { ty::ty_bare_fn(Some(_), b) => { ty::mk_bare_fn(cx, None, b) @@ -4305,8 +4333,8 @@ pub fn adjust_ty<'tcx, F>(cx: &ctxt<'tcx>, ref b => { cx.sess.bug( format!("AdjustReifyFnPointer adjustment on non-fn-item: \ - {}", - b)[]); + {:?}", + b).index(&FullRange)); } } } @@ -4319,9 +4347,12 @@ pub fn adjust_ty<'tcx, F>(cx: &ctxt<'tcx>, let method_call = MethodCall::autoderef(expr_id, i); match method_type(method_call) { Some(method_ty) => { - if let ty::FnConverging(result_type) = ty_fn_ret(method_ty) { - adjusted_ty = result_type; - } + // overloaded deref operators have all late-bound + // regions fully instantiated and coverge + let fn_ret = + ty::assert_no_late_bound_regions(cx, + &ty_fn_ret(method_ty)); + adjusted_ty = fn_ret.unwrap(); } None => {} } @@ -4334,7 +4365,7 @@ pub fn adjust_ty<'tcx, F>(cx: &ctxt<'tcx>, {}", i, ty_to_string(cx, adjusted_ty)) - []); + .index(&FullRange)); } } } @@ -4396,8 +4427,8 @@ pub fn unsize_ty<'tcx>(cx: &ctxt<'tcx>, mk_vec(cx, ty, None) } _ => cx.sess.span_bug(span, - format!("UnsizeLength with bad sty: {}", - ty_to_string(cx, ty))[]) + format!("UnsizeLength with bad sty: {:?}", + ty_to_string(cx, ty)).index(&FullRange)) }, &UnsizeStruct(box ref k, tp_index) => match ty.sty { ty_struct(did, substs) => { @@ -4408,8 +4439,8 @@ pub fn unsize_ty<'tcx>(cx: &ctxt<'tcx>, mk_struct(cx, did, cx.mk_substs(unsized_substs)) } _ => cx.sess.span_bug(span, - format!("UnsizeStruct with bad sty: {}", - ty_to_string(cx, ty))[]) + format!("UnsizeStruct with bad sty: {:?}", + ty_to_string(cx, ty)).index(&FullRange)) }, &UnsizeVtable(TyTrait { ref principal, ref bounds }, _) => { mk_trait(cx, principal.clone(), bounds.clone()) @@ -4422,7 +4453,7 @@ pub fn resolve_expr(tcx: &ctxt, expr: &ast::Expr) -> def::Def { Some(&def) => def, None => { tcx.sess.span_bug(expr.span, format!( - "no def-map entry for expr {}", expr.id)[]); + "no def-map entry for expr {}", expr.id).index(&FullRange)); } } } @@ -4515,9 +4546,9 @@ pub fn expr_kind(tcx: &ctxt, expr: &ast::Expr) -> ExprKind { def => { tcx.sess.span_bug( expr.span, - format!("uncategorized def for expr {}: {}", + format!("uncategorized def for expr {}: {:?}", expr.id, - def)[]); + def).index(&FullRange)); } } } @@ -4638,11 +4669,11 @@ pub fn field_idx_strict(tcx: &ctxt, name: ast::Name, fields: &[field]) let mut i = 0u; for f in fields.iter() { if f.name == name { return i; } i += 1u; } tcx.sess.bug(format!( - "no field named `{}` found in the list of fields `{}`", + "no field named `{}` found in the list of fields `{:?}`", token::get_name(name), fields.iter() .map(|f| token::get_name(f.name).get().to_string()) - .collect::>())[]); + .collect::>()).index(&FullRange)); } pub fn impl_or_trait_item_idx(id: ast::Name, trait_items: &[ImplOrTraitItem]) @@ 
-4715,18 +4746,18 @@ pub fn type_err_to_str<'tcx>(cx: &ctxt<'tcx>, err: &type_err<'tcx>) -> String { terr_mismatch => "types differ".to_string(), terr_unsafety_mismatch(values) => { format!("expected {} fn, found {} fn", - values.expected.to_string(), - values.found.to_string()) + values.expected, + values.found) } terr_abi_mismatch(values) => { format!("expected {} fn, found {} fn", - values.expected.to_string(), - values.found.to_string()) + values.expected, + values.found) } terr_onceness_mismatch(values) => { format!("expected {} fn, found {} fn", - values.expected.to_string(), - values.found.to_string()) + values.expected, + values.found) } terr_sigil_mismatch(values) => { format!("expected {}, found {}", @@ -4818,14 +4849,14 @@ pub fn type_err_to_str<'tcx>(cx: &ctxt<'tcx>, err: &type_err<'tcx>) -> String { "expected an integral type, found `char`".to_string() } terr_int_mismatch(ref values) => { - format!("expected `{}`, found `{}`", - values.expected.to_string(), - values.found.to_string()) + format!("expected `{:?}`, found `{:?}`", + values.expected, + values.found) } terr_float_mismatch(ref values) => { - format!("expected `{}`, found `{}`", - values.expected.to_string(), - values.found.to_string()) + format!("expected `{:?}`, found `{:?}`", + values.expected, + values.found) } terr_variadic_mismatch(ref values) => { format!("expected {} fn, found {} function", @@ -4897,7 +4928,7 @@ pub fn provided_trait_methods<'tcx>(cx: &ctxt<'tcx>, id: ast::DefId) match item.node { ItemTrait(_, _, _, ref ms) => { let (_, p) = - ast_util::split_trait_methods(ms[]); + ast_util::split_trait_methods(ms.index(&FullRange)); p.iter() .map(|m| { match impl_or_trait_item( @@ -4914,16 +4945,16 @@ pub fn provided_trait_methods<'tcx>(cx: &ctxt<'tcx>, id: ast::DefId) }).collect() } _ => { - cx.sess.bug(format!("provided_trait_methods: `{}` is \ + cx.sess.bug(format!("provided_trait_methods: `{:?}` is \ not a trait", - id)[]) + id).index(&FullRange)) } } } _ => { - cx.sess.bug(format!("provided_trait_methods: `{}` is not a \ + cx.sess.bug(format!("provided_trait_methods: `{:?}` is not a \ trait", - id)[]) + id).index(&FullRange)) } } } else { @@ -4950,7 +4981,7 @@ fn lookup_locally_or_in_crate_store(descr: &str, } if def_id.krate == ast::LOCAL_CRATE { - panic!("No def'n found for {} in tcx.{}", def_id, descr); + panic!("No def'n found for {:?} in tcx.{}", def_id, descr); } let v = load_external(); map.insert(def_id, v.clone()); @@ -5057,7 +5088,7 @@ pub fn impl_trait_ref<'tcx>(cx: &ctxt<'tcx>, id: ast::DefId) -> Option>> { memoized(&cx.impl_trait_cache, id, |id: ast::DefId| { if id.krate == ast::LOCAL_CRATE { - debug!("(impl_trait_ref) searching for trait impl {}", id); + debug!("(impl_trait_ref) searching for trait impl {:?}", id); match cx.map.find(id.node) { Some(ast_map::NodeItem(item)) => { match item.node { @@ -5143,7 +5174,9 @@ impl<'tcx> VariantInfo<'tcx> { match ast_variant.node.kind { ast::TupleVariantKind(ref args) => { let arg_tys = if args.len() > 0 { - ty_fn_args(ctor_ty).iter().map(|a| *a).collect() + // the regions in the argument types come from the + // enum def'n, and hence will all be early bound + ty::assert_no_late_bound_regions(cx, &ty_fn_args(ctor_ty)) } else { Vec::new() }; @@ -5159,8 +5192,7 @@ impl<'tcx> VariantInfo<'tcx> { }; }, ast::StructVariantKind(ref struct_def) => { - - let fields: &[StructField] = struct_def.fields[]; + let fields: &[StructField] = struct_def.fields.index(&FullRange); assert!(fields.len() > 0); @@ -5311,7 +5343,7 @@ pub fn enum_variants<'tcx>(cx: 
&ctxt<'tcx>, id: ast::DefId) cx.sess .span_err(e.span, format!("expected constant: {}", - *err)[]); + *err).index(&FullRange)); } }, None => {} @@ -5377,7 +5409,7 @@ pub fn predicates_for_trait_ref<'tcx>(tcx: &ctxt<'tcx>, { let trait_def = lookup_trait_def(tcx, trait_ref.def_id()); - debug!("bounds_for_trait_ref(trait_def={}, trait_ref={})", + debug!("bounds_for_trait_ref(trait_def={:?}, trait_ref={:?})", trait_def.repr(tcx), trait_ref.repr(tcx)); // The interaction between HRTB and supertraits is not entirely @@ -5584,7 +5616,7 @@ pub fn lookup_field_type<'tcx>(tcx: &ctxt<'tcx>, node_id_to_type(tcx, id.node) } else { let mut tcache = tcx.tcache.borrow_mut(); - let pty = tcache.entry(&id).get().unwrap_or_else( + let pty = tcache.entry(id).get().unwrap_or_else( |vacant_entry| vacant_entry.insert(csearch::get_field_type(tcx, struct_id, id))); pty.ty }; @@ -5601,7 +5633,7 @@ pub fn lookup_struct_fields(cx: &ctxt, did: ast::DefId) -> Vec { _ => { cx.sess.bug( format!("ID not mapped to struct fields: {}", - cx.map.node_to_string(did.node))[]); + cx.map.node_to_string(did.node)).index(&FullRange)); } } } else { @@ -5634,7 +5666,7 @@ pub fn struct_fields<'tcx>(cx: &ctxt<'tcx>, did: ast::DefId, substs: &Substs<'tc pub fn tup_fields<'tcx>(v: &[Ty<'tcx>]) -> Vec> { v.iter().enumerate().map(|(i, &f)| { field { - name: token::intern(i.to_string()[]), + name: token::intern(i.to_string().index(&FullRange)), mt: mt { ty: f, mutbl: MutImmutable @@ -5791,40 +5823,6 @@ pub fn is_binopable<'tcx>(cx: &ctxt<'tcx>, ty: Ty<'tcx>, op: ast::BinOp) -> bool return tbl[tycat(cx, ty) as uint ][opcat(op) as uint]; } -/// Returns an equivalent type with all the typedefs and self regions removed. -pub fn normalize_ty<'tcx>(cx: &ctxt<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> { - let u = TypeNormalizer(cx).fold_ty(ty); - return u; - - struct TypeNormalizer<'a, 'tcx: 'a>(&'a ctxt<'tcx>); - - impl<'a, 'tcx> TypeFolder<'tcx> for TypeNormalizer<'a, 'tcx> { - fn tcx(&self) -> &ctxt<'tcx> { let TypeNormalizer(c) = *self; c } - - fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> { - match self.tcx().normalized_cache.borrow().get(&ty).cloned() { - None => {} - Some(u) => return u - } - - let t_norm = ty_fold::super_fold_ty(self, ty); - self.tcx().normalized_cache.borrow_mut().insert(ty, t_norm); - return t_norm; - } - - fn fold_region(&mut self, _: ty::Region) -> ty::Region { - ty::ReStatic - } - - fn fold_substs(&mut self, - substs: &subst::Substs<'tcx>) - -> subst::Substs<'tcx> { - subst::Substs { regions: subst::ErasedRegions, - types: substs.types.fold_with(self) } - } - } -} - // Returns the repeat count for a repeating vector expression. 
pub fn eval_repeat_count(tcx: &ctxt, count_expr: &ast::Expr) -> uint { match const_eval::eval_const_expr_partial(tcx, count_expr) { @@ -5845,7 +5843,7 @@ pub fn eval_repeat_count(tcx: &ctxt, count_expr: &ast::Expr) -> uint { }; tcx.sess.span_err(count_expr.span, format!( "expected positive integer for repeat count, found {}", - found)[]); + found).index(&FullRange)); } Err(_) => { let found = match count_expr.node { @@ -5860,7 +5858,7 @@ pub fn eval_repeat_count(tcx: &ctxt, count_expr: &ast::Expr) -> uint { }; tcx.sess.span_err(count_expr.span, format!( "expected constant integer for repeat count, found {}", - found)[]); + found).index(&FullRange)); } } 0 @@ -5929,7 +5927,7 @@ pub fn required_region_bounds<'tcx>(tcx: &ctxt<'tcx>, predicates: Vec>) -> Vec { - debug!("required_region_bounds(erased_self_ty={}, predicates={})", + debug!("required_region_bounds(erased_self_ty={:?}, predicates={:?})", erased_self_ty.repr(tcx), predicates.repr(tcx)); @@ -6007,7 +6005,7 @@ pub fn populate_implementations_for_type_if_necessary(tcx: &ctxt, return } - debug!("populate_implementations_for_type_if_necessary: searching for {}", type_id); + debug!("populate_implementations_for_type_if_necessary: searching for {:?}", type_id); let mut inherent_impls = Vec::new(); csearch::each_implementation_for_type(&tcx.sess.cstore, type_id, @@ -6204,7 +6202,7 @@ pub fn hash_crate_independent<'tcx>(tcx: &ctxt<'tcx>, ty: Ty<'tcx>, svh: &Svh) - mt.mutbl.hash(state); }; let fn_sig = |&: state: &mut sip::SipState, sig: &Binder>| { - let sig = anonymize_late_bound_regions(tcx, sig); + let sig = anonymize_late_bound_regions(tcx, sig).0; for a in sig.inputs.iter() { helper(tcx, *a, svh, state); } if let ty::FnConverging(output) = sig.output { helper(tcx, output, svh, state); @@ -6265,7 +6263,7 @@ pub fn hash_crate_independent<'tcx>(tcx: &ctxt<'tcx>, ty: Ty<'tcx>, svh: &Svh) - did(state, data.principal_def_id()); hash!(data.bounds); - let principal = anonymize_late_bound_regions(tcx, &data.principal); + let principal = anonymize_late_bound_regions(tcx, &data.principal).0; for subty in principal.substs.types.iter() { helper(tcx, *subty, svh, state); } @@ -6368,7 +6366,7 @@ pub fn construct_parameter_environment<'a,'tcx>( record_region_bounds(tcx, &bounds); - debug!("construct_parameter_environment: free_id={} free_subst={} bounds={}", + debug!("construct_parameter_environment: free_id={:?} free_subst={:?} bounds={:?}", free_id, free_substs.repr(tcx), bounds.repr(tcx)); @@ -6394,15 +6392,15 @@ pub fn construct_parameter_environment<'a,'tcx>( types: &mut VecPerParamSpace>, defs: &[TypeParameterDef<'tcx>]) { for def in defs.iter() { - debug!("construct_parameter_environment(): push_types_from_defs: def={}", + debug!("construct_parameter_environment(): push_types_from_defs: def={:?}", def.repr(tcx)); let ty = ty::mk_param_from_def(tcx, def); types.push(def.space, ty); - } + } } fn record_region_bounds<'tcx>(tcx: &ty::ctxt<'tcx>, bounds: &GenericBounds<'tcx>) { - debug!("record_region_bounds(bounds={})", bounds.repr(tcx)); + debug!("record_region_bounds(bounds={:?})", bounds.repr(tcx)); for predicate in bounds.predicates.iter() { match *predicate { @@ -6644,7 +6642,7 @@ pub fn with_freevars(tcx: &ty::ctxt, fid: ast::NodeId, f: F) -> T where { match tcx.freevars.borrow().get(&fid) { None => f(&[]), - Some(d) => f(d[]) + Some(d) => f(d.index(&FullRange)) } } @@ -6696,6 +6694,16 @@ pub fn binds_late_bound_regions<'tcx, T>( count_late_bound_regions(tcx, value) > 0 } +pub fn assert_no_late_bound_regions<'tcx, T>( + tcx: &ty::ctxt<'tcx>, + 
value: &Binder) + -> T + where T : TypeFoldable<'tcx> + Repr<'tcx> + Clone +{ + assert!(!binds_late_bound_regions(tcx, value)); + value.0.clone() +} + /// Replace any late-bound regions bound in `value` with `'static`. Useful in trans but also /// method lookup and a few other places where precise region relationships are not required. pub fn erase_late_bound_regions<'tcx, T>( @@ -6718,14 +6726,14 @@ pub fn erase_late_bound_regions<'tcx, T>( pub fn anonymize_late_bound_regions<'tcx, T>( tcx: &ctxt<'tcx>, sig: &Binder) - -> T + -> Binder where T : TypeFoldable<'tcx> + Repr<'tcx>, { let mut counter = 0; - replace_late_bound_regions(tcx, sig, |_, db| { + ty::Binder(replace_late_bound_regions(tcx, sig, |_, db| { counter += 1; ReLateBound(db, BrAnon(counter)) - }).0 + }).0) } /// Replaces the late-bound-regions in `value` that are bound by `value`. @@ -6747,7 +6755,7 @@ pub fn replace_late_bound_regions<'tcx, T, F>( debug!("region={}", region.repr(tcx)); match region { ty::ReLateBound(debruijn, br) if debruijn.depth == current_depth => { - * map.entry(&br).get().unwrap_or_else( + * map.entry(br).get().unwrap_or_else( |vacant_entry| vacant_entry.insert(mapf(br, debruijn))) } _ => { @@ -6756,7 +6764,7 @@ pub fn replace_late_bound_regions<'tcx, T, F>( } }); - debug!("resulting map: {} value: {}", map, value.repr(tcx)); + debug!("resulting map: {:?} value: {:?}", map, value.repr(tcx)); (value, map) } @@ -6804,7 +6812,7 @@ impl<'tcx> Repr<'tcx> for AutoRef<'tcx> { fn repr(&self, tcx: &ctxt<'tcx>) -> String { match *self { AutoPtr(a, b, ref c) => { - format!("AutoPtr({},{},{})", a.repr(tcx), b, c.repr(tcx)) + format!("AutoPtr({},{:?},{})", a.repr(tcx), b, c.repr(tcx)) } AutoUnsize(ref a) => { format!("AutoUnsize({})", a.repr(tcx)) @@ -6813,7 +6821,7 @@ impl<'tcx> Repr<'tcx> for AutoRef<'tcx> { format!("AutoUnsizeUniq({})", a.repr(tcx)) } AutoUnsafe(ref a, ref b) => { - format!("AutoUnsafe({},{})", a, b.repr(tcx)) + format!("AutoUnsafe({:?},{})", a, b.repr(tcx)) } } } @@ -6843,7 +6851,7 @@ impl<'tcx> Repr<'tcx> for vtable_origin<'tcx> { fn repr(&self, tcx: &ty::ctxt<'tcx>) -> String { match *self { vtable_static(def_id, ref tys, ref vtable_res) => { - format!("vtable_static({}:{}, {}, {})", + format!("vtable_static({:?}:{}, {}, {})", def_id, ty::item_path_str(tcx, def_id), tys.repr(tcx), @@ -6851,11 +6859,11 @@ impl<'tcx> Repr<'tcx> for vtable_origin<'tcx> { } vtable_param(x, y) => { - format!("vtable_param({}, {})", x, y) + format!("vtable_param({:?}, {})", x, y) } vtable_unboxed_closure(def_id) => { - format!("vtable_unboxed_closure({})", def_id) + format!("vtable_unboxed_closure({:?})", def_id) } vtable_error => { @@ -7286,7 +7294,7 @@ impl ReferencesError for Region impl<'tcx> Repr<'tcx> for ClosureTy<'tcx> { fn repr(&self, tcx: &ctxt<'tcx>) -> String { - format!("ClosureTy({},{},{},{},{},{})", + format!("ClosureTy({},{},{:?},{},{},{})", self.unsafety, self.onceness, self.store, diff --git a/src/librustc/middle/ty_fold.rs b/src/librustc/middle/ty_fold.rs index 1bc0d70945899..dadbae9349fc3 100644 --- a/src/librustc/middle/ty_fold.rs +++ b/src/librustc/middle/ty_fold.rs @@ -844,7 +844,7 @@ impl<'a, 'tcx> TypeFolder<'tcx> for RegionFolder<'a, 'tcx> _ => { debug!("RegionFolder.fold_region({}) folding free region (current_depth={})", r.repr(self.tcx()), self.current_depth); - self.fld_r.call_mut((r, self.current_depth)) + (self.fld_r)(r, self.current_depth) } } } @@ -868,6 +868,9 @@ impl<'a, 'tcx> TypeFolder<'tcx> for RegionEraser<'a, 'tcx> { fn tcx(&self) -> &ty::ctxt<'tcx> { self.tcx } fn 
fold_region(&mut self, r: ty::Region) -> ty::Region { + // because whether or not a region is bound affects subtyping, + // we can't erase the bound/free distinction, but we can + // replace all free regions with 'static match r { ty::ReLateBound(..) | ty::ReEarlyBound(..) => r, _ => ty::ReStatic diff --git a/src/librustc/middle/ty_walk.rs b/src/librustc/middle/ty_walk.rs index 4953e9a2ce1d1..28975c7341659 100644 --- a/src/librustc/middle/ty_walk.rs +++ b/src/librustc/middle/ty_walk.rs @@ -95,7 +95,7 @@ impl<'tcx> Iterator for TypeWalker<'tcx> { type Item = Ty<'tcx>; fn next(&mut self) -> Option> { - debug!("next(): stack={}", self.stack); + debug!("next(): stack={:?}", self.stack); match self.stack.pop() { None => { return None; @@ -103,7 +103,7 @@ impl<'tcx> Iterator for TypeWalker<'tcx> { Some(ty) => { self.last_subtree = self.stack.len(); self.push_subtypes(ty); - debug!("next: stack={}", self.stack); + debug!("next: stack={:?}", self.stack); Some(ty) } } diff --git a/src/librustc/plugin/load.rs b/src/librustc/plugin/load.rs index 44a223954858a..a38298d52dd87 100644 --- a/src/librustc/plugin/load.rs +++ b/src/librustc/plugin/load.rs @@ -223,17 +223,17 @@ impl<'a> PluginLoader<'a> { // this is fatal: there are almost certainly macros we need // inside this crate, so continue would spew "macro undefined" // errors - Err(err) => self.sess.span_fatal(vi.span, err[]) + Err(err) => self.sess.span_fatal(vi.span, err.index(&FullRange)) }; unsafe { let registrar = - match lib.symbol(symbol[]) { + match lib.symbol(symbol.index(&FullRange)) { Ok(registrar) => { mem::transmute::<*mut u8,PluginRegistrarFun>(registrar) } // again fatal if we can't register macros - Err(err) => self.sess.span_fatal(vi.span, err[]) + Err(err) => self.sess.span_fatal(vi.span, err.index(&FullRange)) }; // Intentionally leak the dynamic library. 
We can't ever unload it diff --git a/src/librustc/session/config.rs b/src/librustc/session/config.rs index 138f648049c73..4968066f7b696 100644 --- a/src/librustc/session/config.rs +++ b/src/librustc/session/config.rs @@ -330,7 +330,7 @@ pub fn debugging_opts_map() -> Vec<(&'static str, &'static str, u64)> { ("parse-only", "Parse only; do not compile, assemble, or link", PARSE_ONLY), ("no-trans", "Run all passes except translation; no output", NO_TRANS), ("no-analysis", "Parse and expand the source, but run no analysis and", - NO_TRANS), + NO_ANALYSIS), ("unstable-options", "Adds unstable command line options to rustc interface", UNSTABLE_OPTIONS), ("print-enum-sizes", "Print the size of enums and their variants", PRINT_ENUM_SIZES), @@ -559,17 +559,17 @@ pub fn build_codegen_options(matches: &getopts::Matches) -> CodegenOptions match (value, opt_type_desc) { (Some(..), None) => { early_error(format!("codegen option `{}` takes no \ - value", key)[]) + value", key).index(&FullRange)) } (None, Some(type_desc)) => { early_error(format!("codegen option `{0}` requires \ {1} (-C {0}=)", - key, type_desc)[]) + key, type_desc).index(&FullRange)) } (Some(value), Some(type_desc)) => { early_error(format!("incorrect value `{}` for codegen \ option `{}` - {} was expected", - value, key, type_desc)[]) + value, key, type_desc).index(&FullRange)) } (None, None) => unreachable!() } @@ -579,7 +579,7 @@ pub fn build_codegen_options(matches: &getopts::Matches) -> CodegenOptions } if !found { early_error(format!("unknown codegen option: `{}`", - key)[]); + key).index(&FullRange)); } } return cg; @@ -592,10 +592,10 @@ pub fn default_lib_output() -> CrateType { pub fn default_configuration(sess: &Session) -> ast::CrateConfig { use syntax::parse::token::intern_and_get_ident as intern; - let end = sess.target.target.target_endian[]; - let arch = sess.target.target.arch[]; - let wordsz = sess.target.target.target_word_size[]; - let os = sess.target.target.target_os[]; + let end = sess.target.target.target_endian.index(&FullRange); + let arch = sess.target.target.arch.index(&FullRange); + let wordsz = sess.target.target.target_word_size.index(&FullRange); + let os = sess.target.target.target_os.index(&FullRange); let fam = match sess.target.target.options.is_like_windows { true => InternedString::new("windows"), @@ -631,23 +631,23 @@ pub fn build_configuration(sess: &Session) -> ast::CrateConfig { append_configuration(&mut user_cfg, InternedString::new("test")) } let mut v = user_cfg.into_iter().collect::>(); - v.push_all(default_cfg[]); + v.push_all(default_cfg.index(&FullRange)); v } pub fn build_target_config(opts: &Options, sp: &SpanHandler) -> Config { - let target = match Target::search(opts.target_triple[]) { + let target = match Target::search(opts.target_triple.index(&FullRange)) { Ok(t) => t, Err(e) => { - sp.handler().fatal((format!("Error loading target specification: {}", e))[]); + sp.handler().fatal((format!("Error loading target specification: {}", e)).as_slice()); } }; - let (int_type, uint_type) = match target.target_word_size[] { + let (int_type, uint_type) = match target.target_word_size.index(&FullRange) { "32" => (ast::TyI32, ast::TyU32), "64" => (ast::TyI64, ast::TyU64), w => sp.handler().fatal((format!("target specification was invalid: unrecognized \ - target-word-size {}", w))[]) + target-word-size {}", w)).index(&FullRange)) }; Config { @@ -845,7 +845,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { let unparsed_crate_types = matches.opt_strs("crate-type"); let 
crate_types = parse_crate_types_from_list(unparsed_crate_types) - .unwrap_or_else(|e| early_error(e[])); + .unwrap_or_else(|e| early_error(e.index(&FullRange))); let mut lint_opts = vec!(); let mut describe_lints = false; @@ -873,7 +873,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { } if this_bit == 0 { early_error(format!("unknown debug flag: {}", - *debug_flag)[]) + *debug_flag).index(&FullRange)) } debugging_opts |= this_bit; } @@ -918,7 +918,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { "dep-info" => OutputTypeDepInfo, _ => { early_error(format!("unknown emission type: `{}`", - part)[]) + part).index(&FullRange)) } }; output_types.push(output_type) @@ -957,7 +957,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { Some(arg) => { early_error(format!("optimization level needs to be \ between 0-3 (instead was `{}`)", - arg)[]); + arg).index(&FullRange)); } } } else { @@ -995,7 +995,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { Some(arg) => { early_error(format!("debug info level needs to be between \ 0-2 (instead was `{}`)", - arg)[]); + arg).index(&FullRange)); } } } else { @@ -1013,7 +1013,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { let mut search_paths = SearchPaths::new(); for s in matches.opt_strs("L").iter() { - search_paths.add_path(s[]); + search_paths.add_path(s.index(&FullRange)); } let libs = matches.opt_strs("l").into_iter().map(|s| { @@ -1027,7 +1027,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { s => { early_error(format!("unknown library kind `{}`, expected \ one of dylib, framework, or static", - s)[]); + s).as_slice()); } }; return (name.to_string(), kind) @@ -1045,7 +1045,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { (_, s) => { early_error(format!("unknown library kind `{}`, expected \ one of dylib, framework, or static", - s)[]); + s).index(&FullRange)); } }; (name.to_string(), kind) @@ -1089,7 +1089,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { --debuginfo"); } - let color = match matches.opt_str("color").as_ref().map(|s| s[]) { + let color = match matches.opt_str("color").as_ref().map(|s| s.index(&FullRange)) { Some("auto") => Auto, Some("always") => Always, Some("never") => Never, @@ -1099,7 +1099,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { Some(arg) => { early_error(format!("argument for --color must be auto, always \ or never (instead was `{}`)", - arg)[]) + arg).index(&FullRange)) } }; @@ -1115,7 +1115,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { None => early_error("--extern value must be of the format `foo=bar`"), }; - match externs.entry(&name.to_string()) { + match externs.entry(name.to_string()) { Vacant(entry) => { entry.insert(vec![location.to_string()]); }, Occupied(mut entry) => { entry.get_mut().push(location.to_string()); }, } @@ -1201,7 +1201,7 @@ mod test { #[test] fn test_switch_implies_cfg_test() { let matches = - &match getopts(&["--test".to_string()], optgroups()[]) { + &match getopts(&["--test".to_string()], optgroups().index(&FullRange)) { Ok(m) => m, Err(f) => panic!("test_switch_implies_cfg_test: {}", f) }; @@ -1209,7 +1209,7 @@ mod test { let sessopts = build_session_options(matches); let sess = build_session(sessopts, None, registry); let cfg = build_configuration(&sess); - assert!((attr::contains_name(cfg[], "test"))); + 
assert!((attr::contains_name(cfg.index(&FullRange), "test"))); } // When the user supplies --test and --cfg test, don't implicitly add @@ -1218,7 +1218,7 @@ mod test { fn test_switch_implies_cfg_test_unless_cfg_test() { let matches = &match getopts(&["--test".to_string(), "--cfg=test".to_string()], - optgroups()[]) { + optgroups().index(&FullRange)) { Ok(m) => m, Err(f) => { panic!("test_switch_implies_cfg_test_unless_cfg_test: {}", f) @@ -1238,7 +1238,7 @@ mod test { { let matches = getopts(&[ "-Awarnings".to_string() - ], optgroups()[]).unwrap(); + ], optgroups().index(&FullRange)).unwrap(); let registry = diagnostics::registry::Registry::new(&[]); let sessopts = build_session_options(&matches); let sess = build_session(sessopts, None, registry); @@ -1249,7 +1249,7 @@ mod test { let matches = getopts(&[ "-Awarnings".to_string(), "-Dwarnings".to_string() - ], optgroups()[]).unwrap(); + ], optgroups().index(&FullRange)).unwrap(); let registry = diagnostics::registry::Registry::new(&[]); let sessopts = build_session_options(&matches); let sess = build_session(sessopts, None, registry); @@ -1259,7 +1259,7 @@ mod test { { let matches = getopts(&[ "-Adead_code".to_string() - ], optgroups()[]).unwrap(); + ], optgroups().index(&FullRange)).unwrap(); let registry = diagnostics::registry::Registry::new(&[]); let sessopts = build_session_options(&matches); let sess = build_session(sessopts, None, registry); diff --git a/src/librustc/session/mod.rs b/src/librustc/session/mod.rs index abb780615ae7b..94a6bca4e06e5 100644 --- a/src/librustc/session/mod.rs +++ b/src/librustc/session/mod.rs @@ -174,7 +174,7 @@ impl Session { // cases later on pub fn impossible_case(&self, sp: Span, msg: &str) -> ! { self.span_bug(sp, - format!("impossible case reached: {}", msg)[]); + format!("impossible case reached: {}", msg).index(&FullRange)); } pub fn verbose(&self) -> bool { self.debugging_opt(config::VERBOSE) } pub fn time_passes(&self) -> bool { self.debugging_opt(config::TIME_PASSES) } @@ -216,7 +216,7 @@ impl Session { } pub fn target_filesearch(&self, kind: PathKind) -> filesearch::FileSearch { filesearch::FileSearch::new(self.sysroot(), - self.opts.target_triple[], + self.opts.target_triple.index(&FullRange), &self.opts.search_paths, kind) } diff --git a/src/librustc/util/common.rs b/src/librustc/util/common.rs index 7d2a8509cb510..26f98e28a8d95 100644 --- a/src/librustc/util/common.rs +++ b/src/librustc/util/common.rs @@ -64,7 +64,7 @@ pub fn indent(op: F) -> R where // to make debug output more readable. 
debug!(">>"); let r = op(); - debug!("<< (Result = {})", r); + debug!("<< (Result = {:?})", r); r } diff --git a/src/librustc/util/lev_distance.rs b/src/librustc/util/lev_distance.rs index 79bd0d4e306e4..8f5820d92c589 100644 --- a/src/librustc/util/lev_distance.rs +++ b/src/librustc/util/lev_distance.rs @@ -48,7 +48,7 @@ fn test_lev_distance() { for c in range(0u32, MAX as u32) .filter_map(|i| from_u32(i)) .map(|i| i.to_string()) { - assert_eq!(lev_distance(c[], c[]), 0); + assert_eq!(lev_distance(c.index(&FullRange), c.index(&FullRange)), 0); } let a = "\nMäry häd ä little lämb\n\nLittle lämb\n"; diff --git a/src/librustc/util/ppaux.rs b/src/librustc/util/ppaux.rs index 8c2a9993004d9..2d433369366eb 100644 --- a/src/librustc/util/ppaux.rs +++ b/src/librustc/util/ppaux.rs @@ -55,12 +55,12 @@ pub fn note_and_explain_region(cx: &ctxt, (ref str, Some(span)) => { cx.sess.span_note( span, - format!("{}{}{}", prefix, *str, suffix)[]); + format!("{}{}{}", prefix, *str, suffix).index(&FullRange)); Some(span) } (ref str, None) => { cx.sess.note( - format!("{}{}{}", prefix, *str, suffix)[]); + format!("{}{}{}", prefix, *str, suffix).index(&FullRange)); None } } @@ -111,7 +111,7 @@ pub fn explain_region_and_span(cx: &ctxt, region: ty::Region) } Some(_) | None => { // this really should not happen - (format!("unknown scope: {}. Please report a bug.", scope), None) + (format!("unknown scope: {:?}. Please report a bug.", scope), None) } } } @@ -140,7 +140,7 @@ pub fn explain_region_and_span(cx: &ctxt, region: ty::Region) } Some(_) | None => { // this really should not happen - (format!("{} unknown free region bounded by scope {}", prefix, fr.scope), None) + (format!("{} unknown free region bounded by scope {:?}", prefix, fr.scope), None) } } } @@ -156,7 +156,7 @@ pub fn explain_region_and_span(cx: &ctxt, region: ty::Region) // I believe these cases should not occur (except when debugging, // perhaps) ty::ReInfer(_) | ty::ReLateBound(..) => { - (format!("lifetime {}", region), None) + (format!("lifetime {:?}", region), None) } }; @@ -271,7 +271,7 @@ pub fn ty_to_string<'tcx>(cx: &ctxt<'tcx>, typ: &ty::TyS<'tcx>) -> String { }; if abi != abi::Rust { - s.push_str(format!("extern {} ", abi.to_string())[]); + s.push_str(format!("extern {} ", abi.to_string()).index(&FullRange)); }; s.push_str("fn"); @@ -290,7 +290,7 @@ pub fn ty_to_string<'tcx>(cx: &ctxt<'tcx>, typ: &ty::TyS<'tcx>) -> String { Some(def_id) => { s.push_str(" {"); let path_str = ty::item_path_str(cx, def_id); - s.push_str(path_str[]); + s.push_str(path_str.index(&FullRange)); s.push_str("}"); } None => { } @@ -305,7 +305,7 @@ pub fn ty_to_string<'tcx>(cx: &ctxt<'tcx>, typ: &ty::TyS<'tcx>) -> String { match cty.store { ty::UniqTraitStore => {} ty::RegionTraitStore(region, _) => { - s.push_str(region_to_string(cx, "", true, region)[]); + s.push_str(region_to_string(cx, "", true, region).index(&FullRange)); } } @@ -324,7 +324,7 @@ pub fn ty_to_string<'tcx>(cx: &ctxt<'tcx>, typ: &ty::TyS<'tcx>) -> String { assert_eq!(cty.onceness, ast::Once); s.push_str("proc"); push_sig_to_string(cx, &mut s, '(', ')', &cty.sig, - bounds_str[]); + bounds_str.index(&FullRange)); } ty::RegionTraitStore(..) 
=> { match cty.onceness { @@ -332,7 +332,7 @@ pub fn ty_to_string<'tcx>(cx: &ctxt<'tcx>, typ: &ty::TyS<'tcx>) -> String { ast::Once => s.push_str("once ") } push_sig_to_string(cx, &mut s, '|', '|', &cty.sig, - bounds_str[]); + bounds_str.index(&FullRange)); } } @@ -365,7 +365,7 @@ pub fn ty_to_string<'tcx>(cx: &ctxt<'tcx>, typ: &ty::TyS<'tcx>) -> String { ty::FnConverging(t) => { if !ty::type_is_nil(t) { s.push_str(" -> "); - s.push_str(ty_to_string(cx, t)[]); + s.push_str(ty_to_string(cx, t).index(&FullRange)); } } ty::FnDiverging => { @@ -402,7 +402,7 @@ pub fn ty_to_string<'tcx>(cx: &ctxt<'tcx>, typ: &ty::TyS<'tcx>) -> String { } ty_rptr(r, ref tm) => { let mut buf = region_ptr_to_string(cx, *r); - buf.push_str(mt_to_string(cx, tm)[]); + buf.push_str(mt_to_string(cx, tm).index(&FullRange)); buf } ty_open(typ) => @@ -412,7 +412,7 @@ pub fn ty_to_string<'tcx>(cx: &ctxt<'tcx>, typ: &ty::TyS<'tcx>) -> String { .iter() .map(|elem| ty_to_string(cx, *elem)) .collect::>(); - match strs[] { + match strs.index(&FullRange) { [ref string] => format!("({},)", string), strs => format!("({})", strs.connect(", ")) } @@ -541,7 +541,7 @@ pub fn parameterized<'tcx>(cx: &ctxt<'tcx>, 0 }; - for t in tps[..tps.len() - num_defaults].iter() { + for t in tps.index(&(0..(tps.len() - num_defaults))).iter() { strs.push(ty_to_string(cx, *t)) } @@ -549,11 +549,11 @@ pub fn parameterized<'tcx>(cx: &ctxt<'tcx>, format!("{}({}){}", base, if strs[0].starts_with("(") && strs[0].ends_with(",)") { - strs[0][1 .. strs[0].len() - 2] // Remove '(' and ',)' + strs[0].index(&(1 .. (strs[0].len() - 2))) // Remove '(' and ',)' } else if strs[0].starts_with("(") && strs[0].ends_with(")") { - strs[0][1 .. strs[0].len() - 1] // Remove '(' and ')' + strs[0].index(&(1 .. (strs[0].len() - 1))) // Remove '(' and ')' } else { - strs[0][] + strs[0].index(&FullRange) }, if &*strs[1] == "()" { String::new() } else { format!(" -> {}", strs[1]) }) } else if strs.len() > 0 { @@ -566,7 +566,7 @@ pub fn parameterized<'tcx>(cx: &ctxt<'tcx>, pub fn ty_to_short_str<'tcx>(cx: &ctxt<'tcx>, typ: Ty<'tcx>) -> String { let mut s = typ.repr(cx).to_string(); if s.len() >= 32u { - s = s[0u..32u].to_string(); + s = s.index(&(0u..32u)).to_string(); } return s; } @@ -631,7 +631,7 @@ impl<'tcx, T:Repr<'tcx>> Repr<'tcx> for [T] { impl<'tcx, T:Repr<'tcx>> Repr<'tcx> for OwnedSlice { fn repr(&self, tcx: &ctxt<'tcx>) -> String { - repr_vec(tcx, self[]) + repr_vec(tcx, self.index(&FullRange)) } } @@ -639,7 +639,7 @@ impl<'tcx, T:Repr<'tcx>> Repr<'tcx> for OwnedSlice { // autoderef cannot convert the &[T] handler impl<'tcx, T:Repr<'tcx>> Repr<'tcx> for Vec { fn repr(&self, tcx: &ctxt<'tcx>) -> String { - repr_vec(tcx, self[]) + repr_vec(tcx, self.index(&FullRange)) } } @@ -653,13 +653,13 @@ impl<'tcx, T:UserString<'tcx>> UserString<'tcx> for Vec { impl<'tcx> Repr<'tcx> for def::Def { fn repr(&self, _tcx: &ctxt) -> String { - format!("{}", *self) + format!("{:?}", *self) } } impl<'tcx> Repr<'tcx> for ty::TypeParameterDef<'tcx> { fn repr(&self, tcx: &ctxt<'tcx>) -> String { - format!("TypeParameterDef({}, {}, {}/{})", + format!("TypeParameterDef({:?}, {}, {:?}/{})", self.def_id, self.bounds.repr(tcx), self.space, @@ -854,7 +854,7 @@ impl<'tcx> Repr<'tcx> for ty::Region { fn repr(&self, tcx: &ctxt) -> String { match *self { ty::ReEarlyBound(id, space, index, name) => { - format!("ReEarlyBound({}, {}, {}, {})", + format!("ReEarlyBound({}, {:?}, {}, {})", id, space, index, @@ -862,7 +862,7 @@ impl<'tcx> Repr<'tcx> for ty::Region { } ty::ReLateBound(binder_id, ref 
bound_region) => { - format!("ReLateBound({}, {})", + format!("ReLateBound({:?}, {})", binder_id, bound_region.repr(tcx)) } @@ -870,7 +870,7 @@ impl<'tcx> Repr<'tcx> for ty::Region { ty::ReFree(ref fr) => fr.repr(tcx), ty::ReScope(id) => { - format!("ReScope({})", id) + format!("ReScope({:?})", id) } ty::ReStatic => { @@ -878,7 +878,7 @@ impl<'tcx> Repr<'tcx> for ty::Region { } ty::ReInfer(ReVar(ref vid)) => { - format!("{}", vid) + format!("{:?}", vid) } ty::ReInfer(ReSkolemized(id, ref bound_region)) => { @@ -920,14 +920,14 @@ impl<'tcx> Repr<'tcx> for ast::DefId { Some(ast_map::NodeVariant(..)) | Some(ast_map::NodeStructCtor(..)) => { return format!( - "{}:{}", + "{:?}:{}", *self, ty::item_path_str(tcx, *self)) } _ => {} } } - return format!("{}", *self) + return format!("{:?}", *self) } } @@ -1007,13 +1007,13 @@ impl<'tcx> Repr<'tcx> for ast::Ident { impl<'tcx> Repr<'tcx> for ast::ExplicitSelf_ { fn repr(&self, _tcx: &ctxt) -> String { - format!("{}", *self) + format!("{:?}", *self) } } impl<'tcx> Repr<'tcx> for ast::Visibility { fn repr(&self, _tcx: &ctxt) -> String { - format!("{}", *self) + format!("{:?}", *self) } } @@ -1026,6 +1026,7 @@ impl<'tcx> Repr<'tcx> for ty::BareFnTy<'tcx> { } } + impl<'tcx> Repr<'tcx> for ty::FnSig<'tcx> { fn repr(&self, tcx: &ctxt<'tcx>) -> String { format!("fn{} -> {}", self.inputs.repr(tcx), self.output.repr(tcx)) @@ -1096,7 +1097,7 @@ impl<'tcx> Repr<'tcx> for ty::TraitStore { impl<'tcx> Repr<'tcx> for ty::BuiltinBound { fn repr(&self, _tcx: &ctxt) -> String { - format!("{}", *self) + format!("{:?}", *self) } } @@ -1251,13 +1252,13 @@ impl<'tcx> Repr<'tcx> for ty::UpvarId { impl<'tcx> Repr<'tcx> for ast::Mutability { fn repr(&self, _tcx: &ctxt) -> String { - format!("{}", *self) + format!("{:?}", *self) } } impl<'tcx> Repr<'tcx> for ty::BorrowKind { fn repr(&self, _tcx: &ctxt) -> String { - format!("{}", *self) + format!("{:?}", *self) } } @@ -1271,49 +1272,49 @@ impl<'tcx> Repr<'tcx> for ty::UpvarBorrow { impl<'tcx> Repr<'tcx> for ty::IntVid { fn repr(&self, _tcx: &ctxt) -> String { - format!("{}", self) + format!("{:?}", self) } } impl<'tcx> Repr<'tcx> for ty::FloatVid { fn repr(&self, _tcx: &ctxt) -> String { - format!("{}", self) + format!("{:?}", self) } } impl<'tcx> Repr<'tcx> for ty::RegionVid { fn repr(&self, _tcx: &ctxt) -> String { - format!("{}", self) + format!("{:?}", self) } } impl<'tcx> Repr<'tcx> for ty::TyVid { fn repr(&self, _tcx: &ctxt) -> String { - format!("{}", self) + format!("{:?}", self) } } impl<'tcx> Repr<'tcx> for ty::IntVarValue { fn repr(&self, _tcx: &ctxt) -> String { - format!("{}", *self) + format!("{:?}", *self) } } impl<'tcx> Repr<'tcx> for ast::IntTy { fn repr(&self, _tcx: &ctxt) -> String { - format!("{}", *self) + format!("{:?}", *self) } } impl<'tcx> Repr<'tcx> for ast::UintTy { fn repr(&self, _tcx: &ctxt) -> String { - format!("{}", *self) + format!("{:?}", *self) } } impl<'tcx> Repr<'tcx> for ast::FloatTy { fn repr(&self, _tcx: &ctxt) -> String { - format!("{}", *self) + format!("{:?}", *self) } } @@ -1332,7 +1333,7 @@ impl<'tcx> UserString<'tcx> for ParamTy { impl<'tcx> Repr<'tcx> for ParamTy { fn repr(&self, tcx: &ctxt) -> String { let ident = self.user_string(tcx); - format!("{}/{}.{}", ident, self.space, self.idx) + format!("{}/{:?}.{}", ident, self.space, self.idx) } } @@ -1349,6 +1350,7 @@ impl<'tcx, T:Repr<'tcx>> Repr<'tcx> for ty::Binder { } } +#[old_impl_check] impl<'tcx, S, H, K, V> Repr<'tcx> for HashMap where K : Hash + Eq + Repr<'tcx>, V : Repr<'tcx>, diff --git a/src/librustc/util/snapshot_vec.rs 
b/src/librustc/util/snapshot_vec.rs index 11820c908eeba..6040f55ceeba0 100644 --- a/src/librustc/util/snapshot_vec.rs +++ b/src/librustc/util/snapshot_vec.rs @@ -116,7 +116,7 @@ impl> SnapshotVec { pub fn actions_since_snapshot(&self, snapshot: &Snapshot) -> &[UndoLog] { - self.undo_log[snapshot.length..] + self.undo_log.index(&(snapshot.length..)) } fn assert_open_snapshot(&self, snapshot: &Snapshot) { diff --git a/src/librustc_back/archive.rs b/src/librustc_back/archive.rs index 0bd4265e487a6..48004acaac087 100644 --- a/src/librustc_back/archive.rs +++ b/src/librustc_back/archive.rs @@ -53,7 +53,7 @@ fn run_ar(handler: &ErrorHandler, maybe_ar_prog: &Option, args: &str, cwd: Option<&Path>, paths: &[&Path]) -> ProcessOutput { let ar = match *maybe_ar_prog { - Some(ref ar) => ar[], + Some(ref ar) => ar.index(&FullRange), None => "ar" }; let mut cmd = Command::new(ar); @@ -64,7 +64,7 @@ fn run_ar(handler: &ErrorHandler, maybe_ar_prog: &Option, match cwd { Some(p) => { cmd.cwd(p); - debug!("inside {}", p.display()); + debug!("inside {:?}", p.display()); } None => {} } @@ -75,22 +75,22 @@ fn run_ar(handler: &ErrorHandler, maybe_ar_prog: &Option, if !o.status.success() { handler.err(format!("{} failed with: {}", cmd, - o.status)[]); + o.status).index(&FullRange)); handler.note(format!("stdout ---\n{}", str::from_utf8(o.output - []).unwrap()) - []); + .index(&FullRange)).unwrap()) + .index(&FullRange)); handler.note(format!("stderr ---\n{}", str::from_utf8(o.error - []).unwrap()) - []); + .index(&FullRange)).unwrap()) + .index(&FullRange)); handler.abort_if_errors(); } o }, Err(e) => { - handler.err(format!("could not exec `{}`: {}", ar[], - e)[]); + handler.err(format!("could not exec `{}`: {}", ar.index(&FullRange), + e).index(&FullRange)); handler.abort_if_errors(); panic!("rustc::back::archive::run_ar() should not reach this point"); } @@ -105,17 +105,17 @@ pub fn find_library(name: &str, osprefix: &str, ossuffix: &str, let unixlibname = format!("lib{}.a", name); for path in search_paths.iter() { - debug!("looking for {} inside {}", name, path.display()); - let test = path.join(oslibname[]); + debug!("looking for {} inside {:?}", name, path.display()); + let test = path.join(oslibname.index(&FullRange)); if test.exists() { return test } if oslibname != unixlibname { - let test = path.join(unixlibname[]); + let test = path.join(unixlibname.index(&FullRange)); if test.exists() { return test } } } handler.fatal(format!("could not find native static library `{}`, \ perhaps an -L flag is missing?", - name)[]); + name).index(&FullRange)); } impl<'a> Archive<'a> { @@ -147,7 +147,7 @@ impl<'a> Archive<'a> { /// Lists all files in an archive pub fn files(&self) -> Vec { let output = run_ar(self.handler, &self.maybe_ar_prog, "t", None, &[&self.dst]); - let output = str::from_utf8(output.output[]).unwrap(); + let output = str::from_utf8(output.output.index(&FullRange)).unwrap(); // use lines_any because windows delimits output with `\r\n` instead of // just `\n` output.lines_any().map(|s| s.to_string()).collect() @@ -179,9 +179,9 @@ impl<'a> ArchiveBuilder<'a> { /// search in the relevant locations for a library named `name`. 
pub fn add_native_library(&mut self, name: &str) -> io::IoResult<()> { let location = find_library(name, - self.archive.slib_prefix[], - self.archive.slib_suffix[], - self.archive.lib_search_paths[], + self.archive.slib_prefix.index(&FullRange), + self.archive.slib_suffix.index(&FullRange), + self.archive.lib_search_paths.index(&FullRange), self.archive.handler); self.add_archive(&location, name, |_| false) } @@ -197,12 +197,12 @@ impl<'a> ArchiveBuilder<'a> { // as simple comparison is not enough - there // might be also an extra name suffix let obj_start = format!("{}", name); - let obj_start = obj_start[]; + let obj_start = obj_start.index(&FullRange); // Ignoring all bytecode files, no matter of // name let bc_ext = ".bytecode.deflate"; - self.add_archive(rlib, name[], |fname: &str| { + self.add_archive(rlib, name.index(&FullRange), |fname: &str| { let skip_obj = lto && fname.starts_with(obj_start) && fname.ends_with(".o"); skip_obj || fname.ends_with(bc_ext) || fname == METADATA_FILENAME @@ -239,7 +239,7 @@ impl<'a> ArchiveBuilder<'a> { // allow running `ar s file.a` to update symbols only. if self.should_update_symbols { run_ar(self.archive.handler, &self.archive.maybe_ar_prog, - "s", Some(self.work_dir.path()), args[]); + "s", Some(self.work_dir.path()), args.index(&FullRange)); } return self.archive; } @@ -259,7 +259,7 @@ impl<'a> ArchiveBuilder<'a> { // Add the archive members seen so far, without updating the // symbol table (`S`). run_ar(self.archive.handler, &self.archive.maybe_ar_prog, - "cruS", Some(self.work_dir.path()), args[]); + "cruS", Some(self.work_dir.path()), args.index(&FullRange)); args.clear(); args.push(&abs_dst); @@ -274,7 +274,7 @@ impl<'a> ArchiveBuilder<'a> { // necessary. let flags = if self.should_update_symbols { "crus" } else { "cruS" }; run_ar(self.archive.handler, &self.archive.maybe_ar_prog, - flags, Some(self.work_dir.path()), args[]); + flags, Some(self.work_dir.path()), args.index(&FullRange)); self.archive } @@ -316,7 +316,7 @@ impl<'a> ArchiveBuilder<'a> { } else { filename }; - let new_filename = self.work_dir.path().join(filename[]); + let new_filename = self.work_dir.path().join(filename.index(&FullRange)); try!(fs::rename(file, &new_filename)); self.members.push(Path::new(filename)); } diff --git a/src/librustc_back/lib.rs b/src/librustc_back/lib.rs index 238c84e88a9e0..ca39477fbdcb2 100644 --- a/src/librustc_back/lib.rs +++ b/src/librustc_back/lib.rs @@ -28,22 +28,11 @@ #![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", html_favicon_url = "http://www.rust-lang.org/favicon.ico", html_root_url = "http://doc.rust-lang.org/nightly/")] - -#![allow(unknown_features)] -#![feature(globs, phase, macro_rules, slicing_syntax)] -#![feature(unboxed_closures)] -#![feature(old_orphan_check)] - -#[cfg(stage0)] -#[phase(plugin, link)] -extern crate log; - -#[cfg(not(stage0))] -#[macro_use] -extern crate log; +#![feature(slicing_syntax)] extern crate syntax; extern crate serialize; +#[macro_use] extern crate log; pub mod abi; pub mod archive; diff --git a/src/librustc_back/rpath.rs b/src/librustc_back/rpath.rs index 955081a3af64c..db1dfa6b6eed8 100644 --- a/src/librustc_back/rpath.rs +++ b/src/librustc_back/rpath.rs @@ -44,15 +44,15 @@ pub fn get_rpath_flags(config: RPathConfig) -> Vec where l.map(|p| p.clone()) }).collect::>(); - let rpaths = get_rpaths(config, libs[]); - flags.push_all(rpaths_to_flags(rpaths[])[]); + let rpaths = get_rpaths(config, libs.index(&FullRange)); + 
flags.push_all(rpaths_to_flags(rpaths.index(&FullRange)).index(&FullRange)); flags } fn rpaths_to_flags(rpaths: &[String]) -> Vec { let mut ret = Vec::new(); for rpath in rpaths.iter() { - ret.push(format!("-Wl,-rpath,{}", (*rpath)[])); + ret.push(format!("-Wl,-rpath,{}", (*rpath).index(&FullRange))); } return ret; } @@ -61,10 +61,10 @@ fn get_rpaths(mut config: RPathConfig, libs: &[Path]) -> Vec F: FnOnce() -> Path, G: FnMut(&Path) -> Result, { - debug!("output: {}", config.out_filename.display()); + debug!("output: {:?}", config.out_filename.display()); debug!("libs:"); for libpath in libs.iter() { - debug!(" {}", libpath.display()); + debug!(" {:?}", libpath.display()); } // Use relative paths to the libraries. Binaries can be moved @@ -82,14 +82,14 @@ fn get_rpaths(mut config: RPathConfig, libs: &[Path]) -> Vec } } - log_rpaths("relative", rel_rpaths[]); - log_rpaths("fallback", fallback_rpaths[]); + log_rpaths("relative", rel_rpaths.index(&FullRange)); + log_rpaths("fallback", fallback_rpaths.index(&FullRange)); let mut rpaths = rel_rpaths; - rpaths.push_all(fallback_rpaths[]); + rpaths.push_all(fallback_rpaths.index(&FullRange)); // Remove duplicates - let rpaths = minimize_rpaths(rpaths[]); + let rpaths = minimize_rpaths(rpaths.index(&FullRange)); return rpaths; } @@ -140,7 +140,7 @@ fn minimize_rpaths(rpaths: &[String]) -> Vec { let mut set = HashSet::new(); let mut minimized = Vec::new(); for rpath in rpaths.iter() { - if set.insert(rpath[]) { + if set.insert(rpath.index(&FullRange)) { minimized.push(rpath.clone()); } } diff --git a/src/librustc_back/sha2.rs b/src/librustc_back/sha2.rs index d606c5158d0f7..f33971a6ac0c2 100644 --- a/src/librustc_back/sha2.rs +++ b/src/librustc_back/sha2.rs @@ -140,7 +140,7 @@ impl FixedBuffer for FixedBuffer64 { if input.len() >= buffer_remaining { copy_memory( self.buffer.slice_mut(self.buffer_idx, size), - input[..buffer_remaining]); + input.index(&(0..buffer_remaining))); self.buffer_idx = 0; func(&self.buffer); i += buffer_remaining; @@ -156,7 +156,7 @@ impl FixedBuffer for FixedBuffer64 { // While we have at least a full buffer size chunk's worth of data, process that data // without copying it into the buffer while input.len() - i >= size { - func(input[i..i + size]); + func(input.index(&(i..(i + size)))); i += size; } @@ -166,7 +166,7 @@ impl FixedBuffer for FixedBuffer64 { let input_remaining = input.len() - i; copy_memory( self.buffer.slice_to_mut(input_remaining), - input[i..]); + input.index(&(i..))); self.buffer_idx += input_remaining; } @@ -188,7 +188,7 @@ impl FixedBuffer for FixedBuffer64 { fn full_buffer<'s>(&'s mut self) -> &'s [u8] { assert!(self.buffer_idx == 64); self.buffer_idx = 0; - return self.buffer[..64]; + return self.buffer.index(&(0..64)); } fn position(&self) -> uint { self.buffer_idx } diff --git a/src/librustc_back/svh.rs b/src/librustc_back/svh.rs index 86bd74d3f85e5..863c1a7c865f3 100644 --- a/src/librustc_back/svh.rs +++ b/src/librustc_back/svh.rs @@ -65,7 +65,7 @@ impl Svh { } pub fn as_str<'a>(&'a self) -> &'a str { - self.hash[] + self.hash.index(&FullRange) } pub fn calculate(metadata: &Vec, krate: &ast::Crate) -> Svh { @@ -119,6 +119,14 @@ impl Svh { } impl fmt::Show for Svh { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + //NOTE(stage0): uncomment after snapshot + //write!(f, "Svh {{ {} }}", self.as_str()) + fmt::String::fmt(self, f) + } +} + +impl fmt::String for Svh { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.pad(self.as_str()) } @@ -358,7 +366,7 @@ mod svh_visitor { fn 
macro_name(mac: &Mac) -> token::InternedString { match &mac.node { &MacInvocTT(ref path, ref _tts, ref _stx_ctxt) => { - let s = path.segments[]; + let s = path.segments.index(&FullRange); assert_eq!(s.len(), 1); content(s[0].identifier) } diff --git a/src/librustc_back/target/mod.rs b/src/librustc_back/target/mod.rs index f14583bb9aa81..23c8fc7de51b5 100644 --- a/src/librustc_back/target/mod.rs +++ b/src/librustc_back/target/mod.rs @@ -225,7 +225,7 @@ impl Target { Some(val) => val, None => handler.fatal((format!("Field {} in target specification is required", name)) - []) + .index(&FullRange)) } }; @@ -242,16 +242,18 @@ impl Target { macro_rules! key { ($key_name:ident) => ( { let name = (stringify!($key_name)).replace("_", "-"); - obj.find(name[]).map(|o| o.as_string() + obj.find(name.index(&FullRange)).map(|o| o.as_string() .map(|s| base.options.$key_name = s.to_string())); } ); ($key_name:ident, bool) => ( { let name = (stringify!($key_name)).replace("_", "-"); - obj.find(name[]).map(|o| o.as_boolean().map(|s| base.options.$key_name = s)); + obj.find(name.index(&FullRange)) + .map(|o| o.as_boolean() + .map(|s| base.options.$key_name = s)); } ); ($key_name:ident, list) => ( { let name = (stringify!($key_name)).replace("_", "-"); - obj.find(name[]).map(|o| o.as_array() + obj.find(name.index(&FullRange)).map(|o| o.as_array() .map(|v| base.options.$key_name = v.iter() .map(|a| a.as_string().unwrap().to_string()).collect() ) @@ -299,8 +301,8 @@ impl Target { use serialize::json; fn load_file(path: &Path) -> Result { - let mut f = try!(File::open(path).map_err(|e| e.to_string())); - let obj = try!(json::from_reader(&mut f).map_err(|e| e.to_string())); + let mut f = try!(File::open(path).map_err(|e| format!("{:?}", e))); + let obj = try!(json::from_reader(&mut f).map_err(|e| format!("{:?}", e))); Ok(Target::from_json(obj)) } @@ -313,7 +315,7 @@ impl Target { $( else if target == stringify!($name) { let t = $name::target(); - debug!("Got builtin target: {}", t); + debug!("Got builtin target: {:?}", t); return Ok(t); } )* @@ -367,7 +369,7 @@ impl Target { let target_path = os::getenv("RUST_TARGET_PATH").unwrap_or(String::new()); - let paths = os::split_paths(target_path[]); + let paths = os::split_paths(target_path.index(&FullRange)); // FIXME 16351: add a sane default search path? 
for dir in paths.iter() { @@ -377,6 +379,6 @@ impl Target { } } - Err(format!("Could not find specification for target {}", target)) + Err(format!("Could not find specification for target {:?}", target)) } } diff --git a/src/librustc_borrowck/borrowck/check_loans.rs b/src/librustc_borrowck/borrowck/check_loans.rs index cb77519671cf5..d942581ca62f2 100644 --- a/src/librustc_borrowck/borrowck/check_loans.rs +++ b/src/librustc_borrowck/borrowck/check_loans.rs @@ -96,7 +96,7 @@ impl<'a, 'tcx> euv::Delegate<'tcx> for CheckLoanCtxt<'a, 'tcx> { consume_span: Span, cmt: mc::cmt<'tcx>, mode: euv::ConsumeMode) { - debug!("consume(consume_id={}, cmt={}, mode={})", + debug!("consume(consume_id={}, cmt={}, mode={:?})", consume_id, cmt.repr(self.tcx()), mode); self.consume_common(consume_id, consume_span, cmt, mode); @@ -111,7 +111,7 @@ impl<'a, 'tcx> euv::Delegate<'tcx> for CheckLoanCtxt<'a, 'tcx> { consume_pat: &ast::Pat, cmt: mc::cmt<'tcx>, mode: euv::ConsumeMode) { - debug!("consume_pat(consume_pat={}, cmt={}, mode={})", + debug!("consume_pat(consume_pat={}, cmt={}, mode={:?})", consume_pat.repr(self.tcx()), cmt.repr(self.tcx()), mode); @@ -127,8 +127,8 @@ impl<'a, 'tcx> euv::Delegate<'tcx> for CheckLoanCtxt<'a, 'tcx> { bk: ty::BorrowKind, loan_cause: euv::LoanCause) { - debug!("borrow(borrow_id={}, cmt={}, loan_region={}, \ - bk={}, loan_cause={})", + debug!("borrow(borrow_id={}, cmt={}, loan_region={:?}, \ + bk={:?}, loan_cause={:?})", borrow_id, cmt.repr(self.tcx()), loan_region, bk, loan_cause); @@ -355,10 +355,10 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { //! (Note that some loans can be *issued* without necessarily //! taking effect yet.) - debug!("check_for_conflicting_loans(scope={})", scope); + debug!("check_for_conflicting_loans(scope={:?})", scope); let new_loan_indices = self.loans_generated_by(scope); - debug!("new_loan_indices = {}", new_loan_indices); + debug!("new_loan_indices = {:?}", new_loan_indices); self.each_issued_loan(scope, |issued_loan| { for &new_loan_index in new_loan_indices.iter() { @@ -465,7 +465,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { new_loan.span, format!("cannot borrow `{}`{} as mutable \ more than once at a time", - nl, new_loan_msg)[]) + nl, new_loan_msg).index(&FullRange)) } (ty::UniqueImmBorrow, _) => { @@ -473,7 +473,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { new_loan.span, format!("closure requires unique access to `{}` \ but {} is already borrowed{}", - nl, ol_pronoun, old_loan_msg)[]); + nl, ol_pronoun, old_loan_msg).index(&FullRange)); } (_, ty::UniqueImmBorrow) => { @@ -481,7 +481,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { new_loan.span, format!("cannot borrow `{}`{} as {} because \ previous closure requires unique access", - nl, new_loan_msg, new_loan.kind.to_user_str())[]); + nl, new_loan_msg, new_loan.kind.to_user_str()).index(&FullRange)); } (_, _) => { @@ -494,7 +494,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { new_loan.kind.to_user_str(), ol_pronoun, old_loan.kind.to_user_str(), - old_loan_msg)[]); + old_loan_msg).index(&FullRange)); } } @@ -503,7 +503,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { self.bccx.span_note( span, format!("borrow occurs due to use of `{}` in closure", - nl)[]); + nl).index(&FullRange)); } _ => { } } @@ -552,7 +552,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { self.bccx.span_note( old_loan.span, - format!("{}; {}", borrow_summary, rule_summary)[]); + format!("{}; {}", borrow_summary, rule_summary).index(&FullRange)); let old_loan_span = self.tcx().map.span(old_loan.kill_scope.node_id()); 
self.bccx.span_end_note(old_loan_span, @@ -622,13 +622,13 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { self.bccx.span_err( span, format!("cannot use `{}` because it was mutably borrowed", - self.bccx.loan_path_to_string(copy_path)[]) - []); + self.bccx.loan_path_to_string(copy_path).index(&FullRange)) + .index(&FullRange)); self.bccx.span_note( loan_span, format!("borrow of `{}` occurs here", - self.bccx.loan_path_to_string(&*loan_path)[]) - []); + self.bccx.loan_path_to_string(&*loan_path).index(&FullRange)) + .index(&FullRange)); } } } @@ -647,20 +647,20 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { let err_message = match move_kind { move_data::Captured => format!("cannot move `{}` into closure because it is borrowed", - self.bccx.loan_path_to_string(move_path)[]), + self.bccx.loan_path_to_string(move_path).index(&FullRange)), move_data::Declared | move_data::MoveExpr | move_data::MovePat => format!("cannot move out of `{}` because it is borrowed", - self.bccx.loan_path_to_string(move_path)[]) + self.bccx.loan_path_to_string(move_path).index(&FullRange)) }; - self.bccx.span_err(span, err_message[]); + self.bccx.span_err(span, err_message.index(&FullRange)); self.bccx.span_note( loan_span, format!("borrow of `{}` occurs here", - self.bccx.loan_path_to_string(&*loan_path)[]) - []); + self.bccx.loan_path_to_string(&*loan_path).index(&FullRange)) + .index(&FullRange)); } } } @@ -696,7 +696,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { span: Span, use_kind: MovedValueUseKind, lp: &Rc>) { - debug!("check_if_path_is_moved(id={}, use_kind={}, lp={})", + debug!("check_if_path_is_moved(id={}, use_kind={:?}, lp={})", id, use_kind, lp.repr(self.bccx.tcx)); let base_lp = owned_ptr_base_path_rc(lp); self.move_data.each_move_of(id, &base_lp, |the_move, moved_lp| { @@ -810,7 +810,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { self.bccx.span_err( assignment_span, format!("cannot assign to {}", - self.bccx.cmt_to_string(&*assignee_cmt))[]); + self.bccx.cmt_to_string(&*assignee_cmt)).index(&FullRange)); self.bccx.span_help( self.tcx().map.span(upvar_id.closure_expr_id), "consider changing this closure to take self by mutable reference"); @@ -819,7 +819,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { assignment_span, format!("cannot assign to {} {}", assignee_cmt.mutbl.to_user_str(), - self.bccx.cmt_to_string(&*assignee_cmt))[]); + self.bccx.cmt_to_string(&*assignee_cmt)).index(&FullRange)); } } _ => match opt_loan_path(&assignee_cmt) { @@ -829,14 +829,14 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { format!("cannot assign to {} {} `{}`", assignee_cmt.mutbl.to_user_str(), self.bccx.cmt_to_string(&*assignee_cmt), - self.bccx.loan_path_to_string(&*lp))[]); + self.bccx.loan_path_to_string(&*lp)).index(&FullRange)); } None => { self.bccx.span_err( assignment_span, format!("cannot assign to {} {}", assignee_cmt.mutbl.to_user_str(), - self.bccx.cmt_to_string(&*assignee_cmt))[]); + self.bccx.cmt_to_string(&*assignee_cmt)).index(&FullRange)); } } } @@ -956,10 +956,10 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { self.bccx.span_err( span, format!("cannot assign to `{}` because it is borrowed", - self.bccx.loan_path_to_string(loan_path))[]); + self.bccx.loan_path_to_string(loan_path)).index(&FullRange)); self.bccx.span_note( loan.span, format!("borrow of `{}` occurs here", - self.bccx.loan_path_to_string(loan_path))[]); + self.bccx.loan_path_to_string(loan_path)).index(&FullRange)); } } diff --git a/src/librustc_borrowck/borrowck/doc.rs b/src/librustc_borrowck/borrowck/doc.rs index ac2ab56b2c5f8..2100d5a9bc307 100644 --- 
a/src/librustc_borrowck/borrowck/doc.rs +++ b/src/librustc_borrowck/borrowck/doc.rs @@ -660,7 +660,7 @@ //! necessary to add any restrictions at all to the final result. //! //! ```text -//! RESTRICTIONS(*LV, LT, []) = [] // R-Deref-Freeze-Borrowed +//! RESTRICTIONS(*LV, LT, []) = [] // R-Deref-Freeze-Borrowed //! TYPE(LV) = &const Ty //! ``` //! diff --git a/src/librustc_borrowck/borrowck/fragments.rs b/src/librustc_borrowck/borrowck/fragments.rs index 0d86811af9f49..d752748746589 100644 --- a/src/librustc_borrowck/borrowck/fragments.rs +++ b/src/librustc_borrowck/borrowck/fragments.rs @@ -38,7 +38,7 @@ enum Fragment { // This represents the collection of all but one of the elements // from an array at the path described by the move path index. // Note that attached MovePathIndex should have mem_categorization - // of InteriorElement (i.e. array dereference `[]`). + // of InteriorElement (i.e. array dereference `.index(&FullRange)`). AllButOneFrom(MovePathIndex), } @@ -123,12 +123,12 @@ pub fn instrument_move_fragments<'tcx>(this: &MoveData<'tcx>, let attrs : &[ast::Attribute]; attrs = match tcx.map.find(id) { Some(ast_map::NodeItem(ref item)) => - item.attrs[], + item.attrs.index(&FullRange), Some(ast_map::NodeImplItem(&ast::MethodImplItem(ref m))) => - m.attrs[], + m.attrs.index(&FullRange), Some(ast_map::NodeTraitItem(&ast::ProvidedMethod(ref m))) => - m.attrs[], - _ => [][], + m.attrs.index(&FullRange), + _ => [].index(&FullRange), }; let span_err = @@ -144,7 +144,7 @@ pub fn instrument_move_fragments<'tcx>(this: &MoveData<'tcx>, for (i, mpi) in vec_rc.iter().enumerate() { let render = |&:| this.path_loan_path(*mpi).user_string(tcx); if span_err { - tcx.sess.span_err(sp, format!("{}: `{}`", kind, render())[]); + tcx.sess.span_err(sp, format!("{}: `{}`", kind, render()).index(&FullRange)); } if print { println!("id:{} {}[{}] `{}`", id, kind, i, render()); @@ -156,7 +156,7 @@ pub fn instrument_move_fragments<'tcx>(this: &MoveData<'tcx>, for (i, f) in vec_rc.iter().enumerate() { let render = |&:| f.loan_path_user_string(this, tcx); if span_err { - tcx.sess.span_err(sp, format!("{}: `{}`", kind, render())[]); + tcx.sess.span_err(sp, format!("{}: `{}`", kind, render()).index(&FullRange)); } if print { println!("id:{} {}[{}] `{}`", id, kind, i, render()); @@ -198,11 +198,11 @@ pub fn fixup_fragment_sets<'tcx>(this: &MoveData<'tcx>, tcx: &ty::ctxt<'tcx>) { // First, filter out duplicates moved.sort(); moved.dedup(); - debug!("fragments 1 moved: {}", path_lps(moved[])); + debug!("fragments 1 moved: {:?}", path_lps(moved.index(&FullRange))); assigned.sort(); assigned.dedup(); - debug!("fragments 1 assigned: {}", path_lps(assigned[])); + debug!("fragments 1 assigned: {:?}", path_lps(assigned.index(&FullRange))); // Second, build parents from the moved and assigned. 
for m in moved.iter() { @@ -222,14 +222,14 @@ pub fn fixup_fragment_sets<'tcx>(this: &MoveData<'tcx>, tcx: &ty::ctxt<'tcx>) { parents.sort(); parents.dedup(); - debug!("fragments 2 parents: {}", path_lps(parents[])); + debug!("fragments 2 parents: {:?}", path_lps(parents.index(&FullRange))); // Third, filter the moved and assigned fragments down to just the non-parents - moved.retain(|f| non_member(*f, parents[])); - debug!("fragments 3 moved: {}", path_lps(moved[])); + moved.retain(|f| non_member(*f, parents.index(&FullRange))); + debug!("fragments 3 moved: {:?}", path_lps(moved.index(&FullRange))); - assigned.retain(|f| non_member(*f, parents[])); - debug!("fragments 3 assigned: {}", path_lps(assigned[])); + assigned.retain(|f| non_member(*f, parents.index(&FullRange))); + debug!("fragments 3 assigned: {:?}", path_lps(assigned.index(&FullRange))); // Fourth, build the leftover from the moved, assigned, and parents. for m in moved.iter() { @@ -247,16 +247,16 @@ pub fn fixup_fragment_sets<'tcx>(this: &MoveData<'tcx>, tcx: &ty::ctxt<'tcx>) { unmoved.sort(); unmoved.dedup(); - debug!("fragments 4 unmoved: {}", frag_lps(unmoved[])); + debug!("fragments 4 unmoved: {:?}", frag_lps(unmoved.index(&FullRange))); // Fifth, filter the leftover fragments down to its core. unmoved.retain(|f| match *f { AllButOneFrom(_) => true, - Just(mpi) => non_member(mpi, parents[]) && - non_member(mpi, moved[]) && - non_member(mpi, assigned[]) + Just(mpi) => non_member(mpi, parents.index(&FullRange)) && + non_member(mpi, moved.index(&FullRange)) && + non_member(mpi, assigned.index(&FullRange)) }); - debug!("fragments 5 unmoved: {}", frag_lps(unmoved[])); + debug!("fragments 5 unmoved: {:?}", frag_lps(unmoved.index(&FullRange))); // Swap contents back in. fragments.unmoved_fragments = unmoved; @@ -430,10 +430,10 @@ fn add_fragment_siblings_for_extension<'tcx>(this: &MoveData<'tcx>, } ref sty_and_variant_info => { - let msg = format!("type {} ({}) is not fragmentable", + let msg = format!("type {} ({:?}) is not fragmentable", parent_ty.repr(tcx), sty_and_variant_info); let opt_span = origin_id.and_then(|id|tcx.map.opt_span(id)); - tcx.sess.opt_span_bug(opt_span, msg[]) + tcx.sess.opt_span_bug(opt_span, msg.index(&FullRange)) } } } diff --git a/src/librustc_borrowck/borrowck/gather_loans/gather_moves.rs b/src/librustc_borrowck/borrowck/gather_loans/gather_moves.rs index 6f02f447a158d..ca1fba53de42a 100644 --- a/src/librustc_borrowck/borrowck/gather_loans/gather_moves.rs +++ b/src/librustc_borrowck/borrowck/gather_loans/gather_moves.rs @@ -65,7 +65,7 @@ pub fn gather_match_variant<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>, cmt: mc::cmt<'tcx>, mode: euv::MatchMode) { let tcx = bccx.tcx; - debug!("gather_match_variant(move_pat={}, cmt={}, mode={})", + debug!("gather_match_variant(move_pat={}, cmt={}, mode={:?})", move_pat.id, cmt.repr(tcx), mode); let opt_lp = opt_loan_path(&cmt); diff --git a/src/librustc_borrowck/borrowck/gather_loans/mod.rs b/src/librustc_borrowck/borrowck/gather_loans/mod.rs index 1e9e5b22aa0ed..2c48e0da01d77 100644 --- a/src/librustc_borrowck/borrowck/gather_loans/mod.rs +++ b/src/librustc_borrowck/borrowck/gather_loans/mod.rs @@ -76,7 +76,7 @@ impl<'a, 'tcx> euv::Delegate<'tcx> for GatherLoanCtxt<'a, 'tcx> { _consume_span: Span, cmt: mc::cmt<'tcx>, mode: euv::ConsumeMode) { - debug!("consume(consume_id={}, cmt={}, mode={})", + debug!("consume(consume_id={}, cmt={}, mode={:?})", consume_id, cmt.repr(self.tcx()), mode); match mode { @@ -93,7 +93,7 @@ impl<'a, 'tcx> euv::Delegate<'tcx> for 
GatherLoanCtxt<'a, 'tcx> { matched_pat: &ast::Pat, cmt: mc::cmt<'tcx>, mode: euv::MatchMode) { - debug!("matched_pat(matched_pat={}, cmt={}, mode={})", + debug!("matched_pat(matched_pat={}, cmt={}, mode={:?})", matched_pat.repr(self.tcx()), cmt.repr(self.tcx()), mode); @@ -109,7 +109,7 @@ impl<'a, 'tcx> euv::Delegate<'tcx> for GatherLoanCtxt<'a, 'tcx> { consume_pat: &ast::Pat, cmt: mc::cmt<'tcx>, mode: euv::ConsumeMode) { - debug!("consume_pat(consume_pat={}, cmt={}, mode={})", + debug!("consume_pat(consume_pat={}, cmt={}, mode={:?})", consume_pat.repr(self.tcx()), cmt.repr(self.tcx()), mode); @@ -132,8 +132,8 @@ impl<'a, 'tcx> euv::Delegate<'tcx> for GatherLoanCtxt<'a, 'tcx> { bk: ty::BorrowKind, loan_cause: euv::LoanCause) { - debug!("borrow(borrow_id={}, cmt={}, loan_region={}, \ - bk={}, loan_cause={})", + debug!("borrow(borrow_id={}, cmt={}, loan_region={:?}, \ + bk={:?}, loan_cause={:?})", borrow_id, cmt.repr(self.tcx()), loan_region, bk, loan_cause); @@ -235,7 +235,7 @@ impl<'a, 'tcx> GatherLoanCtxt<'a, 'tcx> { loan_region: ty::Region, cause: euv::LoanCause) { debug!("guarantee_valid(borrow_id={}, cmt={}, \ - req_mutbl={}, loan_region={})", + req_mutbl={:?}, loan_region={:?})", borrow_id, cmt.repr(self.tcx()), req_kind, @@ -273,7 +273,7 @@ impl<'a, 'tcx> GatherLoanCtxt<'a, 'tcx> { self.bccx, borrow_span, cause, cmt.clone(), loan_region); - debug!("guarantee_valid(): restrictions={}", restr); + debug!("guarantee_valid(): restrictions={:?}", restr); // Create the loan record (if needed). let loan = match restr { @@ -306,18 +306,18 @@ impl<'a, 'tcx> GatherLoanCtxt<'a, 'tcx> { ty::ReInfer(..) => { self.tcx().sess.span_bug( cmt.span, - format!("invalid borrow lifetime: {}", - loan_region)[]); + format!("invalid borrow lifetime: {:?}", + loan_region).index(&FullRange)); } }; - debug!("loan_scope = {}", loan_scope); + debug!("loan_scope = {:?}", loan_scope); let borrow_scope = region::CodeExtent::from_node_id(borrow_id); let gen_scope = self.compute_gen_scope(borrow_scope, loan_scope); - debug!("gen_scope = {}", gen_scope); + debug!("gen_scope = {:?}", gen_scope); let kill_scope = self.compute_kill_scope(loan_scope, &*loan_path); - debug!("kill_scope = {}", kill_scope); + debug!("kill_scope = {:?}", kill_scope); if req_kind == ty::MutBorrow { self.mark_loan_path_as_mutated(&*loan_path); diff --git a/src/librustc_borrowck/borrowck/gather_loans/move_error.rs b/src/librustc_borrowck/borrowck/gather_loans/move_error.rs index 95c5d9415a125..1bb143e1dc8d3 100644 --- a/src/librustc_borrowck/borrowck/gather_loans/move_error.rs +++ b/src/librustc_borrowck/borrowck/gather_loans/move_error.rs @@ -120,7 +120,7 @@ fn report_cannot_move_out_of<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>, bccx.span_err( move_from.span, format!("cannot move out of {}", - bccx.cmt_to_string(&*move_from))[]); + bccx.cmt_to_string(&*move_from)).index(&FullRange)); } mc::cat_downcast(ref b, _) | @@ -132,7 +132,7 @@ fn report_cannot_move_out_of<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>, move_from.span, format!("cannot move out of type `{}`, \ which defines the `Drop` trait", - b.ty.user_string(bccx.tcx))[]); + b.ty.user_string(bccx.tcx)).index(&FullRange)); }, _ => panic!("this path should not cause illegal move") } @@ -155,10 +155,10 @@ fn note_move_destination(bccx: &BorrowckCtxt, format!("to prevent the move, \ use `ref {0}` or `ref mut {0}` to capture value by \ reference", - pat_name)[]); + pat_name).index(&FullRange)); } else { bccx.span_note(move_to_span, format!("and here (use `ref {0}` or `ref mut {0}`)", - pat_name)[]); + 
pat_name).index(&FullRange)); } } diff --git a/src/librustc_borrowck/borrowck/mod.rs b/src/librustc_borrowck/borrowck/mod.rs index 20949151557cb..88f56f6862218 100644 --- a/src/librustc_borrowck/borrowck/mod.rs +++ b/src/librustc_borrowck/borrowck/mod.rs @@ -137,7 +137,7 @@ fn borrowck_fn(this: &mut BorrowckCtxt, check_loans::check_loans(this, &loan_dfcx, flowed_moves, - all_loans[], + all_loans.index(&FullRange), id, decl, body); @@ -505,7 +505,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { pub fn report(&self, err: BckError<'tcx>) { self.span_err( err.span, - self.bckerr_to_string(&err)[]); + self.bckerr_to_string(&err).index(&FullRange)); self.note_and_explain_bckerr(err); } @@ -527,7 +527,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { use_span, format!("{} of possibly uninitialized variable: `{}`", verb, - self.loan_path_to_string(lp))[]); + self.loan_path_to_string(lp)).index(&FullRange)); (self.loan_path_to_string(moved_lp), String::new()) } @@ -569,7 +569,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { format!("{} of {}moved value: `{}`", verb, msg, - nl)[]); + nl).index(&FullRange)); (ol, moved_lp_msg) } }; @@ -586,9 +586,9 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { } r => { self.tcx.sess.bug(format!("MoveExpr({}) maps to \ - {}, not Expr", + {:?}, not Expr", the_move.id, - r)[]) + r).index(&FullRange)) } }; let (suggestion, _) = @@ -599,7 +599,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { ol, moved_lp_msg, expr_ty.user_string(self.tcx), - suggestion)[]); + suggestion).index(&FullRange)); } move_data::MovePat => { @@ -610,7 +610,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { which is moved by default", ol, moved_lp_msg, - pat_ty.user_string(self.tcx))[]); + pat_ty.user_string(self.tcx)).index(&FullRange)); self.tcx.sess.span_help(span, "use `ref` to override"); } @@ -624,9 +624,9 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { } r => { self.tcx.sess.bug(format!("Captured({}) maps to \ - {}, not Expr", + {:?}, not Expr", the_move.id, - r)[]) + r).index(&FullRange)) } }; let (suggestion, help) = @@ -642,7 +642,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { ol, moved_lp_msg, expr_ty.user_string(self.tcx), - suggestion)[]); + suggestion).index(&FullRange)); self.tcx.sess.span_help(expr_span, help); } } @@ -673,7 +673,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { self.tcx.sess.span_err( span, format!("re-assignment of immutable variable `{}`", - self.loan_path_to_string(lp))[]); + self.loan_path_to_string(lp)).index(&FullRange)); self.tcx.sess.span_note(assign.span, "prior assignment occurs here"); } @@ -799,12 +799,12 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { self.tcx.sess.span_err( span, format!("{} in an aliasable location", - prefix)[]); + prefix).index(&FullRange)); } mc::AliasableClosure(id) => { self.tcx.sess.span_err(span, format!("{} in a captured outer \ - variable in an `Fn` closure", prefix)[]); + variable in an `Fn` closure", prefix).as_slice()); span_help!(self.tcx.sess, self.tcx.map.span(id), "consider changing this closure to take self by mutable reference"); } @@ -812,12 +812,12 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { mc::AliasableStaticMut(..) 
=> { self.tcx.sess.span_err( span, - format!("{} in a static location", prefix)[]); + format!("{} in a static location", prefix).index(&FullRange)); } mc::AliasableBorrowed => { self.tcx.sess.span_err( span, - format!("{} in a `&` reference", prefix)[]); + format!("{} in a `&` reference", prefix).index(&FullRange)); } } @@ -885,12 +885,12 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { note_and_explain_region( self.tcx, format!("{} would have to be valid for ", - descr)[], + descr).index(&FullRange), loan_scope, "..."); note_and_explain_region( self.tcx, - format!("...but {} is only valid for ", descr)[], + format!("...but {} is only valid for ", descr).index(&FullRange), ptr_scope, ""); } @@ -910,7 +910,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { out.push('('); self.append_loan_path_to_string(&**lp_base, out); out.push_str(DOWNCAST_PRINTED_OPERATOR); - out.push_str(ty::item_path_str(self.tcx, variant_def_id)[]); + out.push_str(ty::item_path_str(self.tcx, variant_def_id).index(&FullRange)); out.push(')'); } @@ -924,7 +924,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { } mc::PositionalField(idx) => { out.push('.'); - out.push_str(idx.to_string()[]); + out.push_str(idx.to_string().index(&FullRange)); } } } @@ -956,7 +956,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { out.push('('); self.append_autoderefd_loan_path_to_string(&**lp_base, out); out.push(':'); - out.push_str(ty::item_path_str(self.tcx, variant_def_id)[]); + out.push_str(ty::item_path_str(self.tcx, variant_def_id).index(&FullRange)); out.push(')'); } @@ -1005,7 +1005,7 @@ impl DataFlowOperator for LoanDataFlowOperator { impl<'tcx> Repr<'tcx> for Loan<'tcx> { fn repr(&self, tcx: &ty::ctxt<'tcx>) -> String { - format!("Loan_{}({}, {}, {}-{}, {})", + format!("Loan_{}({}, {:?}, {:?}-{:?}, {})", self.index, self.loan_path.repr(tcx), self.kind, diff --git a/src/librustc_borrowck/borrowck/move_data.rs b/src/librustc_borrowck/borrowck/move_data.rs index b49164f0c2547..21b3c910d328a 100644 --- a/src/librustc_borrowck/borrowck/move_data.rs +++ b/src/librustc_borrowck/borrowck/move_data.rs @@ -311,7 +311,7 @@ impl<'tcx> MoveData<'tcx> { } }; - debug!("move_path(lp={}, index={})", + debug!("move_path(lp={}, index={:?})", lp.repr(tcx), index); @@ -362,7 +362,7 @@ impl<'tcx> MoveData<'tcx> { lp: Rc>, id: ast::NodeId, kind: MoveKind) { - debug!("add_move(lp={}, id={}, kind={})", + debug!("add_move(lp={}, id={}, kind={:?})", lp.repr(tcx), id, kind); @@ -413,12 +413,12 @@ impl<'tcx> MoveData<'tcx> { }; if self.is_var_path(path_index) { - debug!("add_assignment[var](lp={}, assignment={}, path_index={})", + debug!("add_assignment[var](lp={}, assignment={}, path_index={:?})", lp.repr(tcx), self.var_assignments.borrow().len(), path_index); self.var_assignments.borrow_mut().push(assignment); } else { - debug!("add_assignment[path](lp={}, path_index={})", + debug!("add_assignment[path](lp={}, path_index={:?})", lp.repr(tcx), path_index); self.path_assignments.borrow_mut().push(assignment); diff --git a/src/librustc_borrowck/graphviz.rs b/src/librustc_borrowck/graphviz.rs index f2c35851d0d7c..647a5dd559c1d 100644 --- a/src/librustc_borrowck/graphviz.rs +++ b/src/librustc_borrowck/graphviz.rs @@ -53,14 +53,14 @@ pub struct DataflowLabeller<'a, 'tcx: 'a> { impl<'a, 'tcx> DataflowLabeller<'a, 'tcx> { fn dataflow_for(&self, e: EntryOrExit, n: &Node<'a>) -> String { let id = n.1.data.id; - debug!("dataflow_for({}, id={}) {}", e, id, self.variants); + debug!("dataflow_for({:?}, id={}) {:?}", e, id, self.variants); let mut sets = "".to_string(); let mut seen_one = 
false; for &variant in self.variants.iter() { if seen_one { sets.push_str(" "); } else { seen_one = true; } sets.push_str(variant.short_name()); sets.push_str(": "); - sets.push_str(self.dataflow_for_variant(e, n, variant)[]); + sets.push_str(self.dataflow_for_variant(e, n, variant).index(&FullRange)); } sets } @@ -89,7 +89,7 @@ impl<'a, 'tcx> DataflowLabeller<'a, 'tcx> { set.push_str(", "); } let loan_str = self.borrowck_ctxt.loan_path_to_string(&*lp); - set.push_str(loan_str[]); + set.push_str(loan_str.index(&FullRange)); saw_some = true; true }); @@ -101,7 +101,8 @@ impl<'a, 'tcx> DataflowLabeller<'a, 'tcx> { let dfcx = &self.analysis_data.loans; let loan_index_to_path = |&mut: loan_index| { let all_loans = &self.analysis_data.all_loans; - all_loans[loan_index].loan_path() + let l: &borrowck::Loan = &all_loans[loan_index]; + l.loan_path() }; self.build_set(e, cfgidx, dfcx, loan_index_to_path) } @@ -111,7 +112,7 @@ impl<'a, 'tcx> DataflowLabeller<'a, 'tcx> { let move_index_to_path = |&mut: move_index| { let move_data = &self.analysis_data.move_data.move_data; let moves = move_data.moves.borrow(); - let the_move = &(*moves)[move_index]; + let the_move: &borrowck::move_data::Move = &(*moves)[move_index]; move_data.path_loan_path(the_move.path) }; self.build_set(e, cfgidx, dfcx, move_index_to_path) @@ -122,7 +123,7 @@ impl<'a, 'tcx> DataflowLabeller<'a, 'tcx> { let assign_index_to_path = |&mut: assign_index| { let move_data = &self.analysis_data.move_data.move_data; let assignments = move_data.var_assignments.borrow(); - let assignment = &(*assignments)[assign_index]; + let assignment: &borrowck::move_data::Assignment = &(*assignments)[assign_index]; move_data.path_loan_path(assignment.path) }; self.build_set(e, cfgidx, dfcx, assign_index_to_path) diff --git a/src/librustc_borrowck/lib.rs b/src/librustc_borrowck/lib.rs index 0600ddba01897..26bcd5f4c10cd 100644 --- a/src/librustc_borrowck/lib.rs +++ b/src/librustc_borrowck/lib.rs @@ -17,28 +17,13 @@ html_root_url = "http://doc.rust-lang.org/nightly/")] #![allow(unknown_features)] -#![feature(default_type_params, globs, macro_rules, phase, quote)] +#![feature(quote)] #![feature(slicing_syntax, unsafe_destructor)] #![feature(rustc_diagnostic_macros)] -#![feature(unboxed_closures)] -#![feature(old_orphan_check)] #![allow(non_camel_case_types)] -#[cfg(stage0)] -#[phase(plugin, link)] -extern crate log; - -#[cfg(not(stage0))] -#[macro_use] -extern crate log; - -#[cfg(stage0)] -#[phase(plugin, link)] -extern crate syntax; - -#[cfg(not(stage0))] -#[macro_use] -extern crate syntax; +#[macro_use] extern crate log; +#[macro_use] extern crate syntax; // for "clarity", rename the graphviz crate to dot; graphviz within `borrowck` // refers to the borrowck-specific graphviz adapter traits. 
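Most of the churn in the borrowck hunks above is mechanical: the postfix slicing sugar `expr[]` is spelled out as its desugaring `expr.index(&FullRange)` wherever a `String` built with `format!` is handed to an API that expects `&str`. As a minimal sketch of that same `String -> &str` step on present-day Rust, where the sugar later became `[..]`/`RangeFull` (the `span_err` helper below is an illustrative stand-in, not the compiler's real diagnostics API):

```rust
// Illustrative stand-in for `&str`-taking sinks such as `Session::span_err`;
// this is not the compiler's actual diagnostics API.
fn span_err(msg: &str) {
    eprintln!("error: {}", msg);
}

fn main() {
    let path = "foo.x";
    let msg: String = format!("cannot move out of `{}`", path);

    // The patch writes this full-range borrow as `msg.index(&FullRange)`;
    // on current Rust the same coercion is written as either:
    span_err(&msg[..]);     // explicit full-range slice
    span_err(msg.as_str()); // or the named accessor
}
```

Either form only borrows the temporary `String` for the duration of the call, which is all these call sites need.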
diff --git a/src/librustc_driver/driver.rs b/src/librustc_driver/driver.rs index 74f81ae9d6d1e..52d49924d05d5 100644 --- a/src/librustc_driver/driver.rs +++ b/src/librustc_driver/driver.rs @@ -58,12 +58,12 @@ pub fn compile_input(sess: Session, let outputs = build_output_filenames(input, outdir, output, - krate.attrs[], + krate.attrs.index(&FullRange), &sess); - let id = link::find_crate_name(Some(&sess), krate.attrs[], + let id = link::find_crate_name(Some(&sess), krate.attrs.index(&FullRange), input); let expanded_crate - = match phase_2_configure_and_expand(&sess, krate, id[], + = match phase_2_configure_and_expand(&sess, krate, id.index(&FullRange), addl_plugins) { None => return, Some(k) => k @@ -75,7 +75,7 @@ pub fn compile_input(sess: Session, let mut forest = ast_map::Forest::new(expanded_crate); let ast_map = assign_node_ids_and_map(&sess, &mut forest); - write_out_deps(&sess, input, &outputs, id[]); + write_out_deps(&sess, input, &outputs, id.index(&FullRange)); if stop_after_phase_2(&sess) { return; } @@ -171,9 +171,9 @@ pub fn phase_2_configure_and_expand(sess: &Session, let time_passes = sess.time_passes(); *sess.crate_types.borrow_mut() = - collect_crate_types(sess, krate.attrs[]); + collect_crate_types(sess, krate.attrs.index(&FullRange)); *sess.crate_metadata.borrow_mut() = - collect_crate_metadata(sess, krate.attrs[]); + collect_crate_metadata(sess, krate.attrs.index(&FullRange)); time(time_passes, "recursion limit", (), |_| { middle::recursion_limit::update_recursion_limit(sess, &krate); @@ -268,8 +268,8 @@ pub fn phase_2_configure_and_expand(sess: &Session, if cfg!(windows) { _old_path = os::getenv("PATH").unwrap_or(_old_path); let mut new_path = sess.host_filesearch(PathKind::All).get_dylib_search_paths(); - new_path.extend(os::split_paths(_old_path[]).into_iter()); - os::setenv("PATH", os::join_paths(new_path[]).unwrap()); + new_path.extend(os::split_paths(_old_path.index(&FullRange)).into_iter()); + os::setenv("PATH", os::join_paths(new_path.index(&FullRange)).unwrap()); } let cfg = syntax::ext::expand::ExpansionConfig { crate_name: crate_name.to_string(), @@ -533,7 +533,7 @@ pub fn phase_5_run_llvm_passes(sess: &Session, time(sess.time_passes(), "LLVM passes", (), |_| write::run_passes(sess, trans, - sess.opts.output_types[], + sess.opts.output_types.index(&FullRange), outputs)); } @@ -547,14 +547,14 @@ pub fn phase_6_link_output(sess: &Session, outputs: &OutputFilenames) { let old_path = os::getenv("PATH").unwrap_or_else(||String::new()); let mut new_path = sess.host_filesearch(PathKind::All).get_tools_search_paths(); - new_path.extend(os::split_paths(old_path[]).into_iter()); - os::setenv("PATH", os::join_paths(new_path[]).unwrap()); + new_path.extend(os::split_paths(old_path.index(&FullRange)).into_iter()); + os::setenv("PATH", os::join_paths(new_path.index(&FullRange)).unwrap()); time(sess.time_passes(), "linking", (), |_| link::link_binary(sess, trans, outputs, - trans.link.crate_name[])); + trans.link.crate_name.index(&FullRange))); os::setenv("PATH", old_path); } @@ -643,7 +643,7 @@ fn write_out_deps(sess: &Session, // write Makefile-compatible dependency rules let files: Vec = sess.codemap().files.borrow() .iter().filter(|fmap| fmap.is_real_file()) - .map(|fmap| escape_dep_filename(fmap.name[])) + .map(|fmap| escape_dep_filename(fmap.name.index(&FullRange))) .collect(); let mut file = try!(io::File::create(&deps_filename)); for path in out_filenames.iter() { @@ -657,7 +657,7 @@ fn write_out_deps(sess: &Session, Ok(()) => {} Err(e) => { 
sess.fatal(format!("error writing dependencies to `{}`: {}", - deps_filename.display(), e)[]); + deps_filename.display(), e).index(&FullRange)); } } } @@ -726,9 +726,9 @@ pub fn collect_crate_types(session: &Session, let res = !link::invalid_output_for_target(session, *crate_type); if !res { - session.warn(format!("dropping unsupported crate type `{}` \ + session.warn(format!("dropping unsupported crate type `{:?}` \ for target `{}`", - *crate_type, session.opts.target_triple)[]); + *crate_type, session.opts.target_triple).index(&FullRange)); } res diff --git a/src/librustc_driver/lib.rs b/src/librustc_driver/lib.rs index 89b2e0f257acd..5af114abeea77 100644 --- a/src/librustc_driver/lib.rs +++ b/src/librustc_driver/lib.rs @@ -22,11 +22,9 @@ html_favicon_url = "http://www.rust-lang.org/favicon.ico", html_root_url = "http://doc.rust-lang.org/nightly/")] -#![feature(default_type_params, globs, macro_rules, phase, quote)] +#![feature(quote)] #![feature(slicing_syntax, unsafe_destructor)] #![feature(rustc_diagnostic_macros)] -#![feature(unboxed_closures)] -#![feature(associated_types)] extern crate arena; extern crate flate; @@ -41,22 +39,8 @@ extern crate rustc_trans; extern crate rustc_typeck; extern crate serialize; extern crate "rustc_llvm" as llvm; - -#[cfg(stage0)] -#[phase(plugin, link)] -extern crate log; - -#[cfg(not(stage0))] -#[macro_use] -extern crate log; - -#[cfg(stage0)] -#[phase(plugin, link)] -extern crate syntax; - -#[cfg(not(stage0))] -#[macro_use] -extern crate syntax; +#[macro_use] extern crate log; +#[macro_use] extern crate syntax; pub use syntax::diagnostic; @@ -105,12 +89,12 @@ fn run_compiler(args: &[String]) { let descriptions = diagnostics::registry::Registry::new(&DIAGNOSTICS); match matches.opt_str("explain") { Some(ref code) => { - match descriptions.find_description(code[]) { + match descriptions.find_description(code.index(&FullRange)) { Some(ref description) => { println!("{}", description); } None => { - early_error(format!("no extended information for {}", code)[]); + early_error(format!("no extended information for {}", code).index(&FullRange)); } } return; @@ -136,7 +120,7 @@ fn run_compiler(args: &[String]) { early_error("no input filename given"); } 1u => { - let ifile = matches.free[0][]; + let ifile = matches.free[0].index(&FullRange); if ifile == "-" { let contents = io::stdin().read_to_end().unwrap(); let src = String::from_utf8(contents).unwrap(); @@ -313,7 +297,7 @@ Available lint options: for lint in lints.into_iter() { let name = lint.name_lower().replace("_", "-"); println!(" {} {:7.7} {}", - padded(name[]), lint.default_level.as_str(), lint.desc); + padded(name.index(&FullRange)), lint.default_level.as_str(), lint.desc); } println!("\n"); }; @@ -343,7 +327,7 @@ Available lint options: let desc = to.into_iter().map(|x| x.as_str().replace("_", "-")) .collect::>().connect(", "); println!(" {} {}", - padded(name[]), desc); + padded(name.index(&FullRange)), desc); } println!("\n"); }; @@ -409,7 +393,7 @@ pub fn handle_options(mut args: Vec) -> Option { } let matches = - match getopts::getopts(args[], config::optgroups()[]) { + match getopts::getopts(args.index(&FullRange), config::optgroups().index(&FullRange)) { Ok(m) => m, Err(f_stable_attempt) => { // redo option parsing, including unstable options this time, @@ -559,7 +543,7 @@ pub fn monitor(f: F) { cfg = cfg.stack_size(STACK_SIZE); } - match cfg.spawn(move || { std::io::stdio::set_stderr(box w); f() }).join() { + match cfg.scoped(move || { std::io::stdio::set_stderr(box w); f() }).join() { 
Ok(()) => { /* fallthrough */ } Err(value) => { // Thread panicked without emitting a fatal diagnostic @@ -583,7 +567,7 @@ pub fn monitor(f: F) { "run with `RUST_BACKTRACE=1` for a backtrace".to_string(), ]; for note in xs.iter() { - emitter.emit(None, note[], None, diagnostic::Note) + emitter.emit(None, note.index(&FullRange), None, diagnostic::Note) } match r.read_to_string() { @@ -591,7 +575,7 @@ pub fn monitor(f: F) { Err(e) => { emitter.emit(None, format!("failed to read internal \ - stderr: {}", e)[], + stderr: {}", e).index(&FullRange), None, diagnostic::Error) } diff --git a/src/librustc_driver/pretty.rs b/src/librustc_driver/pretty.rs index 61fd7d16ab7dd..44a35ef6be70b 100644 --- a/src/librustc_driver/pretty.rs +++ b/src/librustc_driver/pretty.rs @@ -296,7 +296,7 @@ impl<'tcx> pprust::PpAnn for TypedAnnotation<'tcx> { try!(pp::word(&mut s.s, ppaux::ty_to_string( tcx, - ty::expr_ty(tcx, expr))[])); + ty::expr_ty(tcx, expr)).index(&FullRange))); s.pclose() } _ => Ok(()) @@ -370,7 +370,7 @@ impl UserIdentifiedItem { ItemViaNode(node_id) => NodesMatchingDirect(Some(node_id).into_iter()), ItemViaPath(ref parts) => - NodesMatchingSuffix(map.nodes_matching_suffix(parts[])), + NodesMatchingSuffix(map.nodes_matching_suffix(parts.index(&FullRange))), } } @@ -382,7 +382,7 @@ impl UserIdentifiedItem { user_option, self.reconstructed_input(), is_wrong_because); - sess.fatal(message[]) + sess.fatal(message.index(&FullRange)) }; let mut saw_node = ast::DUMMY_NODE_ID; @@ -509,7 +509,7 @@ pub fn pretty_print_input(sess: Session, let is_expanded = needs_expansion(&ppm); let compute_ast_map = needs_ast_map(&ppm, &opt_uii); let krate = if compute_ast_map { - match driver::phase_2_configure_and_expand(&sess, krate, id[], None) { + match driver::phase_2_configure_and_expand(&sess, krate, id.index(&FullRange), None) { None => return, Some(k) => k } @@ -528,7 +528,7 @@ pub fn pretty_print_input(sess: Session, }; let src_name = driver::source_name(input); - let src = sess.codemap().get_filemap(src_name[]) + let src = sess.codemap().get_filemap(src_name.index(&FullRange)) .src.as_bytes().to_vec(); let mut rdr = MemReader::new(src); @@ -548,7 +548,7 @@ pub fn pretty_print_input(sess: Session, (PpmSource(s), None) => s.call_with_pp_support( sess, ast_map, &arenas, id, out, |annotation, out| { - debug!("pretty printing source code {}", s); + debug!("pretty printing source code {:?}", s); let sess = annotation.sess(); pprust::print_crate(sess.codemap(), sess.diagnostic(), @@ -563,7 +563,7 @@ pub fn pretty_print_input(sess: Session, (PpmSource(s), Some(uii)) => s.call_with_pp_support( sess, ast_map, &arenas, id, (out,uii), |annotation, (out,uii)| { - debug!("pretty printing source code {}", s); + debug!("pretty printing source code {:?}", s); let sess = annotation.sess(); let ast_map = annotation.ast_map() .expect("--pretty missing ast_map"); @@ -586,10 +586,10 @@ pub fn pretty_print_input(sess: Session, }), (PpmFlowGraph, opt_uii) => { - debug!("pretty printing flow graph for {}", opt_uii); + debug!("pretty printing flow graph for {:?}", opt_uii); let uii = opt_uii.unwrap_or_else(|| { sess.fatal(format!("`pretty flowgraph=..` needs NodeId (int) or - unique path suffix (b::c::d)")[]) + unique path suffix (b::c::d)").index(&FullRange)) }); let ast_map = ast_map.expect("--pretty flowgraph missing ast_map"); @@ -597,7 +597,7 @@ pub fn pretty_print_input(sess: Session, let node = ast_map.find(nodeid).unwrap_or_else(|| { sess.fatal(format!("--pretty flowgraph couldn't find id: {}", - nodeid)[]) + 
nodeid).index(&FullRange)) }); let code = blocks::Code::from_node(node); @@ -609,14 +609,14 @@ pub fn pretty_print_input(sess: Session, } None => { let message = format!("--pretty=flowgraph needs \ - block, fn, or method; got {}", + block, fn, or method; got {:?}", node); // point to what was found, if there's an // accessible span. match ast_map.opt_span(nodeid) { - Some(sp) => sess.span_fatal(sp, message[]), - None => sess.fatal(message[]) + Some(sp) => sess.span_fatal(sp, message.index(&FullRange)), + None => sess.fatal(message.index(&FullRange)) } } } diff --git a/src/librustc_driver/test.rs b/src/librustc_driver/test.rs index b1e65dce6045a..d301e9c7b5c27 100644 --- a/src/librustc_driver/test.rs +++ b/src/librustc_driver/test.rs @@ -279,7 +279,7 @@ impl<'a, 'tcx> Env<'a, 'tcx> { pub fn t_param(&self, space: subst::ParamSpace, index: u32) -> Ty<'tcx> { let name = format!("T{}", index); - ty::mk_param(self.infcx.tcx, space, index, token::intern(name[])) + ty::mk_param(self.infcx.tcx, space, index, token::intern(name.index(&FullRange))) } pub fn re_early_bound(&self, @@ -418,7 +418,7 @@ impl<'a, 'tcx> Env<'a, 'tcx> { self.ty_to_string(t_glb)); match self.glb().tys(t1, t2) { Err(e) => { - panic!("unexpected error computing LUB: {}", e) + panic!("unexpected error computing LUB: {:?}", e) } Ok(t) => { self.assert_eq(t, t_glb); @@ -841,7 +841,7 @@ fn walk_ty_skip_subtree() { let mut walker = uniq_ty.walk(); while let Some(t) = walker.next() { - debug!("walked to {}", t); + debug!("walked to {:?}", t); let (expected_ty, skip) = expected.pop().unwrap(); assert_eq!(t, expected_ty); if skip { walker.skip_current_subtree(); } diff --git a/src/librustc_llvm/lib.rs b/src/librustc_llvm/lib.rs index 8a9334be985f7..0bed754aa3c1b 100644 --- a/src/librustc_llvm/lib.rs +++ b/src/librustc_llvm/lib.rs @@ -21,10 +21,7 @@ html_favicon_url = "http://www.rust-lang.org/favicon.ico", html_root_url = "http://doc.rust-lang.org/nightly/")] -#![feature(globs)] #![feature(link_args)] -#![feature(unboxed_closures)] -#![feature(old_orphan_check)] extern crate libc; diff --git a/src/librustc_resolve/build_reduced_graph.rs b/src/librustc_resolve/build_reduced_graph.rs index 10788f9f7cb83..ca6b1469f8569 100644 --- a/src/librustc_resolve/build_reduced_graph.rs +++ b/src/librustc_resolve/build_reduced_graph.rs @@ -221,14 +221,14 @@ impl<'a, 'b:'a, 'tcx:'b> GraphBuilder<'a, 'b, 'tcx> { self.resolve_error(sp, format!("duplicate definition of {} `{}`", namespace_error_to_string(duplicate_type), - token::get_name(name))[]); + token::get_name(name)).index(&FullRange)); { let r = child.span_for_namespace(ns); for sp in r.iter() { self.session.span_note(*sp, format!("first definition of {} `{}` here", namespace_error_to_string(duplicate_type), - token::get_name(name))[]); + token::get_name(name)).index(&FullRange)); } } } @@ -845,7 +845,7 @@ impl<'a, 'b:'a, 'tcx:'b> GraphBuilder<'a, 'b, 'tcx> { name: Name, new_parent: &Rc) { debug!("(building reduced graph for \ - external crate) building external def, priv {}", + external crate) building external def, priv {:?}", vis); let is_public = vis == ast::Public; let modifiers = if is_public { PUBLIC } else { DefModifiers::empty() } | IMPORTABLE; @@ -989,7 +989,7 @@ impl<'a, 'b:'a, 'tcx:'b> GraphBuilder<'a, 'b, 'tcx> { DefLocal(..) | DefPrimTy(..) | DefTyParam(..) | DefUse(..) | DefUpvar(..) | DefRegion(..) | DefTyParamBinder(..) | DefLabel(..) | DefSelfTy(..) 
=> { - panic!("didn't expect `{}`", def); + panic!("didn't expect `{:?}`", def); } } } @@ -1201,7 +1201,7 @@ impl<'a, 'b:'a, 'tcx:'b> GraphBuilder<'a, 'b, 'tcx> { debug!("(building import directive) building import \ directive: {}::{}", self.names_to_string(module_.imports.borrow().last().unwrap() - .module_path[]), + .module_path.index(&FullRange)), token::get_name(target)); let mut import_resolutions = module_.import_resolutions diff --git a/src/librustc_resolve/check_unused.rs b/src/librustc_resolve/check_unused.rs index 26b1058d18341..18066a7b94bd8 100644 --- a/src/librustc_resolve/check_unused.rs +++ b/src/librustc_resolve/check_unused.rs @@ -58,7 +58,7 @@ impl<'a, 'b, 'tcx> UnusedImportCheckVisitor<'a, 'b, 'tcx> { // public or private item, we will check the correct thing, dependent on how the import // is used. fn finalize_import(&mut self, id: ast::NodeId, span: Span) { - debug!("finalizing import uses for {}", + debug!("finalizing import uses for {:?}", self.session.codemap().span_to_snippet(span)); if !self.used_imports.contains(&(id, TypeNS)) && diff --git a/src/librustc_resolve/lib.rs b/src/librustc_resolve/lib.rs index 58102fe5629d9..93ad69e03b17f 100644 --- a/src/librustc_resolve/lib.rs +++ b/src/librustc_resolve/lib.rs @@ -16,26 +16,11 @@ html_favicon_url = "http://www.rust-lang.org/favicon.ico", html_root_url = "http://doc.rust-lang.org/nightly/")] -#![feature(globs, phase, slicing_syntax)] +#![feature(slicing_syntax)] #![feature(rustc_diagnostic_macros)] -#![feature(associated_types)] -#![feature(old_orphan_check)] -#[cfg(stage0)] -#[phase(plugin, link)] -extern crate log; - -#[cfg(not(stage0))] -#[macro_use] -extern crate log; - -#[cfg(stage0)] -#[phase(plugin, link)] -extern crate syntax; - -#[cfg(not(stage0))] -#[macro_use] -extern crate syntax; +#[macro_use] extern crate log; +#[macro_use] extern crate syntax; extern crate rustc; @@ -86,9 +71,9 @@ use syntax::ast::{PolyTraitRef, PrimTy, SelfExplicit}; use syntax::ast::{RegionTyParamBound, StructField}; use syntax::ast::{TraitRef, TraitTyParamBound}; use syntax::ast::{Ty, TyBool, TyChar, TyF32}; -use syntax::ast::{TyF64, TyFloat, TyI, TyI8, TyI16, TyI32, TyI64, TyInt, TyObjectSum}; +use syntax::ast::{TyF64, TyFloat, TyIs, TyI8, TyI16, TyI32, TyI64, TyInt, TyObjectSum}; use syntax::ast::{TyParam, TyParamBound, TyPath, TyPtr, TyPolyTraitRef, TyQPath}; -use syntax::ast::{TyRptr, TyStr, TyU, TyU8, TyU16, TyU32, TyU64, TyUint}; +use syntax::ast::{TyRptr, TyStr, TyUs, TyU8, TyU16, TyU32, TyU64, TyUint}; use syntax::ast::{TypeImplItem}; use syntax::ast; use syntax::ast_map; @@ -548,7 +533,7 @@ impl Module { impl fmt::Show for Module { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{}, kind: {}, {}", + write!(f, "{:?}, kind: {:?}, {}", self.def_id, self.kind, if self.is_public { "public" } else { "private" } ) @@ -689,7 +674,7 @@ impl NameBindings { /// Records a type definition. fn define_type(&self, def: Def, sp: Span, modifiers: DefModifiers) { - debug!("defining type for def {} with modifiers {}", def, modifiers); + debug!("defining type for def {:?} with modifiers {:?}", def, modifiers); // Merges the type with the existing type def or creates a new one. let type_def = self.type_def.borrow().clone(); match type_def { @@ -714,7 +699,7 @@ impl NameBindings { /// Records a value definition. 
fn define_value(&self, def: Def, sp: Span, modifiers: DefModifiers) { - debug!("defining value for def {} with modifiers {}", def, modifiers); + debug!("defining value for def {:?} with modifiers {:?}", def, modifiers); *self.value_def.borrow_mut() = Some(ValueNsDef { def: def, value_span: Some(sp), @@ -833,13 +818,15 @@ impl PrimitiveTypeTable { table.intern("char", TyChar); table.intern("f32", TyFloat(TyF32)); table.intern("f64", TyFloat(TyF64)); - table.intern("int", TyInt(TyI)); + table.intern("int", TyInt(TyIs)); + table.intern("isize", TyInt(TyIs)); table.intern("i8", TyInt(TyI8)); table.intern("i16", TyInt(TyI16)); table.intern("i32", TyInt(TyI32)); table.intern("i64", TyInt(TyI64)); table.intern("str", TyStr); - table.intern("uint", TyUint(TyU)); + table.intern("uint", TyUint(TyUs)); + table.intern("usize", TyUint(TyUs)); table.intern("u8", TyUint(TyU8)); table.intern("u16", TyUint(TyU16)); table.intern("u32", TyUint(TyU32)); @@ -1071,10 +1058,10 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { let msg = format!("unresolved import `{}`{}", self.import_path_to_string( import_directive.module_path - [], + .index(&FullRange), import_directive.subclass), help); - self.resolve_error(span, msg[]); + self.resolve_error(span, msg.index(&FullRange)); } Indeterminate => break, // Bail out. We'll come around next time. Success(()) => () // Good. Continue. @@ -1104,7 +1091,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { .iter() .map(|seg| seg.identifier.name) .collect(); - self.names_to_string(names[]) + self.names_to_string(names.index(&FullRange)) } fn import_directive_subclass_to_string(&mut self, @@ -1168,7 +1155,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { let module_path = &import_directive.module_path; debug!("(resolving import for module) resolving import `{}::...` in `{}`", - self.names_to_string(module_path[]), + self.names_to_string(module_path.index(&FullRange)), self.module_to_string(&*module_)); // First, resolve the module path for the directive, if necessary. @@ -1177,7 +1164,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { Some((self.graph_root.get_module(), LastMod(AllPublic))) } else { match self.resolve_module_path(module_.clone(), - module_path[], + module_path.index(&FullRange), DontUseLexicalScope, import_directive.span, ImportSearch) { @@ -1272,7 +1259,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { lp: LastPrivate) -> ResolveResult<()> { debug!("(resolving single import) resolving `{}` = `{}::{}` from \ - `{}` id {}, last private {}", + `{}` id {}, last private {:?}", token::get_name(target), self.module_to_string(&*containing_module), token::get_name(source), @@ -1375,7 +1362,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { shadowable: _ }) => { debug!("(resolving single import) found \ - import in ns {}", namespace); + import in ns {:?}", namespace); let id = import_resolution.id(namespace); // track used imports and extern crates as well this.used_imports.insert((id, namespace)); @@ -1484,7 +1471,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { match *result { BoundResult(ref target_module, ref name_bindings) => { - debug!("(resolving single import) found {} target: {}", + debug!("(resolving single import) found {:?} target: {:?}", namespace_name, name_bindings.def_for_namespace(namespace)); self.check_for_conflicting_import( @@ -1508,7 +1495,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { } UnboundResult => { /* Continue. 
*/ } UnknownResult => { - panic!("{} result should be known at this point", namespace_name); + panic!("{:?} result should be known at this point", namespace_name); } } }; @@ -1701,7 +1688,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { let is_public = import_directive.is_public; let mut import_resolutions = module_.import_resolutions.borrow_mut(); - let dest_import_resolution = import_resolutions.entry(&name).get().unwrap_or_else( + let dest_import_resolution = import_resolutions.entry(name).get().unwrap_or_else( |vacant_entry| { // Create a new import resolution from this child. vacant_entry.insert(ImportResolution::new(id, is_public)) @@ -1774,7 +1761,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { ValueNS => "value", }, token::get_name(name).get()); - self.session.span_err(import_span, msg[]); + self.session.span_err(import_span, msg.index(&FullRange)); } Some(_) | None => {} } @@ -1789,7 +1776,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { if !name_bindings.defined_in_namespace_with(namespace, IMPORTABLE) { let msg = format!("`{}` is not directly importable", token::get_name(name)); - self.session.span_err(import_span, msg[]); + self.session.span_err(import_span, msg.index(&FullRange)); } } @@ -1814,7 +1801,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { crate in this module \ (maybe you meant `use {0}::*`?)", token::get_name(name).get()); - self.session.span_err(import_span, msg[]); + self.session.span_err(import_span, msg.index(&FullRange)); } Some(_) | None => {} } @@ -1836,7 +1823,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { let msg = format!("import `{}` conflicts with value \ in this module", token::get_name(name).get()); - self.session.span_err(import_span, msg[]); + self.session.span_err(import_span, msg.index(&FullRange)); if let Some(span) = value.value_span { self.session.span_note(span, "conflicting value here"); @@ -1854,7 +1841,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { let msg = format!("import `{}` conflicts with type in \ this module", token::get_name(name).get()); - self.session.span_err(import_span, msg[]); + self.session.span_err(import_span, msg.index(&FullRange)); if let Some(span) = ty.type_span { self.session.span_note(span, "note conflicting type here") @@ -1867,7 +1854,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { let msg = format!("inherent implementations \ are only allowed on types \ defined in the current module"); - self.session.span_err(span, msg[]); + self.session.span_err(span, msg.index(&FullRange)); self.session.span_note(import_span, "import from other module here") } @@ -1876,7 +1863,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { let msg = format!("import `{}` conflicts with existing \ submodule", token::get_name(name).get()); - self.session.span_err(import_span, msg[]); + self.session.span_err(import_span, msg.index(&FullRange)); if let Some(span) = ty.type_span { self.session.span_note(span, "note conflicting module here") @@ -1906,7 +1893,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { .span_err(span, format!("an external crate named `{}` has already \ been imported into this module", - token::get_name(name).get())[]); + token::get_name(name).get()).index(&FullRange)); } } @@ -1925,7 +1912,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { format!("the name `{}` conflicts with an external \ crate that has been imported into this \ module", - token::get_name(name).get())[]); + token::get_name(name).get()).index(&FullRange)); } } @@ -1973,7 +1960,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { let segment_name = token::get_name(name); let module_name = self.module_to_string(&*search_module); let mut span = span; - 
let msg = if "???" == module_name[] { + let msg = if "???" == module_name.index(&FullRange) { span.hi = span.lo + Pos::from_uint(segment_name.get().len()); match search_parent_externals(name, @@ -2086,14 +2073,14 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { match module_prefix_result { Failed(None) => { let mpath = self.names_to_string(module_path); - let mpath = mpath[]; + let mpath = mpath.index(&FullRange); match mpath.rfind(':') { Some(idx) => { let msg = format!("Could not find `{}` in `{}`", // idx +- 1 to account for the // colons on either side - mpath[idx + 1..], - mpath[0..idx - 1]); + mpath.index(&((idx + 1)..)), + mpath.index(&(0..(idx - 1)))); return Failed(Some((span, msg))); }, None => { @@ -2165,7 +2152,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { namespace: Namespace) -> ResolveResult<(Target, bool)> { debug!("(resolving item in lexical scope) resolving `{}` in \ - namespace {} in `{}`", + namespace {:?} in `{}`", token::get_name(name), namespace, self.module_to_string(&*module_)); @@ -2195,7 +2182,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { None => { // Not found; continue. debug!("(resolving item in lexical scope) found \ - import resolution, but not in namespace {}", + import resolution, but not in namespace {:?}", namespace); } Some(target) => { @@ -2268,7 +2255,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { true) { Failed(Some((span, msg))) => self.resolve_error(span, format!("failed to resolve. {}", - msg)[]), + msg).index(&FullRange)), Failed(None) => (), // Continue up the search chain. Indeterminate => { // We couldn't see through the higher scope because of an @@ -2475,7 +2462,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { match import_resolution.target_for_namespace(namespace) { None => { debug!("(resolving name in module) name found, \ - but not in namespace {}", + but not in namespace {:?}", namespace); } Some(target) => { @@ -2528,7 +2515,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { } else { let err = format!("unresolved import (maybe you meant `{}::*`?)", sn); - self.resolve_error((*imports)[index].span, err[]); + self.resolve_error((*imports)[index].span, err.index(&FullRange)); } } @@ -2620,7 +2607,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { match def_like { DlDef(d @ DefUpvar(..)) => { self.session.span_bug(span, - format!("unexpected {} in bindings", d)[]) + format!("unexpected {:?} in bindings", d).index(&FullRange)) } DlDef(d @ DefLocal(_)) => { let node_id = d.def_id().node; @@ -2639,14 +2626,14 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { def = DefUpvar(node_id, function_id, last_proc_body_id); let mut seen = self.freevars_seen.borrow_mut(); - let seen = match seen.entry(&function_id) { + let seen = match seen.entry(function_id) { Occupied(v) => v.into_mut(), Vacant(v) => v.insert(NodeSet::new()), }; if seen.contains(&node_id) { continue; } - match self.freevars.borrow_mut().entry(&function_id) { + match self.freevars.borrow_mut().entry(function_id) { Occupied(v) => v.into_mut(), Vacant(v) => v.insert(vec![]), }.push(Freevar { def: prev_def, span: span }); @@ -2766,7 +2753,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { for (i, rib) in ribs.iter().enumerate().rev() { match rib.bindings.get(&name).cloned() { Some(def_like) => { - return self.upvarify(ribs[i + 1..], def_like, span); + return self.upvarify(ribs.index(&((i + 1)..)), def_like, span); } None => { // Continue. 
@@ -2859,7 +2846,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { generics, implemented_traits, &**self_type, - impl_items[]); + impl_items.index(&FullRange)); } ItemTrait(_, ref generics, ref bounds, ref trait_items) => { @@ -2937,7 +2924,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { ItemStruct(ref struct_def, ref generics) => { self.resolve_struct(item.id, generics, - struct_def.fields[]); + struct_def.fields.index(&FullRange)); } ItemMod(ref module_) => { @@ -3010,7 +2997,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { parameter in this type \ parameter list", token::get_name( - name))[]) + name)).index(&FullRange)) } seen_bindings.insert(name); @@ -3182,26 +3169,26 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { }; let msg = format!("attempt to {} a nonexistent trait `{}`", usage_str, path_str); - self.resolve_error(trait_reference.path.span, msg[]); + self.resolve_error(trait_reference.path.span, msg.index(&FullRange)); } Some(def) => { match def { (DefTrait(_), _) => { - debug!("(resolving trait) found trait def: {}", def); + debug!("(resolving trait) found trait def: {:?}", def); self.record_def(trait_reference.ref_id, def); } (def, _) => { self.resolve_error(trait_reference.path.span, format!("`{}` is not a trait", self.path_names_to_string( - &trait_reference.path))[]); + &trait_reference.path)).index(&FullRange)); // If it's a typedef, give a note if let DefTy(..) = def { self.session.span_note( trait_reference.path.span, format!("`type` aliases cannot be used for traits") - []); + .index(&FullRange)); } } } @@ -3398,7 +3385,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { self.resolve_error(span, format!("method `{}` is not a member of trait `{}`", token::get_name(name), - path_str)[]); + path_str).index(&FullRange)); } } } @@ -3467,7 +3454,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { format!("variable `{}` from pattern #1 is \ not bound in pattern #{}", token::get_name(key), - i + 1)[]); + i + 1).index(&FullRange)); } Some(binding_i) => { if binding_0.binding_mode != binding_i.binding_mode { @@ -3476,7 +3463,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { format!("variable `{}` is bound with different \ mode in pattern #{} than in pattern #1", token::get_name(key), - i + 1)[]); + i + 1).index(&FullRange)); } } } @@ -3489,7 +3476,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { format!("variable `{}` from pattern {}{} is \ not bound in pattern {}1", token::get_name(key), - "#", i + 1, "#")[]); + "#", i + 1, "#").index(&FullRange)); } } } @@ -3578,8 +3565,8 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { None => { match self.resolve_path(ty.id, path, TypeNS, true) { Some(def) => { - debug!("(resolving type) resolved `{}` to \ - type {}", + debug!("(resolving type) resolved `{:?}` to \ + type {:?}", token::get_ident(path.segments.last().unwrap() .identifier), def); result_def = Some(def); @@ -3604,7 +3591,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { None => { let msg = format!("use of undeclared type name `{}`", self.path_names_to_string(path)); - self.resolve_error(ty.span, msg[]); + self.resolve_error(ty.span, msg.index(&FullRange)); } } } @@ -3676,7 +3663,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { format!("declaration of `{}` shadows an enum \ variant or unit-like struct in \ scope", - token::get_name(renamed))[]); + token::get_name(renamed)).index(&FullRange)); } FoundConst(ref def, lp) if mode == RefutableMode => { debug!("(resolving pattern) resolving `{}` to \ @@ -3728,7 +3715,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { list", token::get_ident( ident)) - []) + .index(&FullRange)) } else if bindings_list.get(&renamed) == Some(&pat_id) { // Then 
this is a duplicate variable in the @@ -3737,7 +3724,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { format!("identifier `{}` is bound \ more than once in the same \ pattern", - token::get_ident(ident))[]); + token::get_ident(ident)).index(&FullRange)); } // Else, not bound in the same pattern: do // nothing. @@ -3763,13 +3750,13 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { self.resolve_error(path.span, format!("`{}` is not an enum variant, struct or const", token::get_ident( - path.segments.last().unwrap().identifier))[]); + path.segments.last().unwrap().identifier)).as_slice()); } None => { self.resolve_error(path.span, format!("unresolved enum variant, struct or const `{}`", token::get_ident( - path.segments.last().unwrap().identifier))[]); + path.segments.last().unwrap().identifier)).as_slice()); } } @@ -3797,10 +3784,10 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { } result => { debug!("(resolving pattern) didn't find struct \ - def: {}", result); + def: {:?}", result); let msg = format!("`{}` does not name a structure", self.path_names_to_string(path)); - self.resolve_error(path.span, msg[]); + self.resolve_error(path.span, msg.index(&FullRange)); } } } @@ -3821,7 +3808,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { ValueNS) { Success((target, _)) => { debug!("(resolve bare identifier pattern) succeeded in \ - finding {} at {}", + finding {} at {:?}", token::get_name(name), target.bindings.value_def.borrow()); match *target.bindings.value_def.borrow() { @@ -3862,7 +3849,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { match err { Some((span, msg)) => { self.resolve_error(span, format!("failed to resolve: {}", - msg)[]); + msg).index(&FullRange)); } None => () } @@ -4057,7 +4044,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { let last_private; let module = self.current_module.clone(); match self.resolve_module_path(module, - module_path[], + module_path.index(&FullRange), UseLexicalScope, path.span, PathSearch) { @@ -4072,7 +4059,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { }; self.resolve_error(span, format!("failed to resolve. {}", - msg)[]); + msg).index(&FullRange)); return None; } Indeterminate => panic!("indeterminate unexpected"), @@ -4115,7 +4102,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { let containing_module; let last_private; match self.resolve_module_path_from_root(root_module, - module_path[], + module_path.index(&FullRange), 0, path.span, PathSearch, @@ -4125,13 +4112,13 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { Some((span, msg)) => (span, msg), None => { let msg = format!("Use of undeclared module `::{}`", - self.names_to_string(module_path[])); + self.names_to_string(module_path.index(&FullRange))); (path.span, msg) } }; self.resolve_error(span, format!("failed to resolve. {}", - msg)[]); + msg).index(&FullRange)); return None; } @@ -4172,14 +4159,14 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { } TypeNS => { let name = ident.name; - self.search_ribs(self.type_ribs[], name, span) + self.search_ribs(self.type_ribs.index(&FullRange), name, span) } }; match search_result { Some(DlDef(def)) => { debug!("(resolving path in local ribs) resolved `{}` to \ - local: {}", + local: {:?}", token::get_ident(ident), def); return Some(def); @@ -4227,7 +4214,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { match err { Some((span, msg)) => self.resolve_error(span, format!("failed to resolve. 
{}", - msg)[]), + msg).index(&FullRange)), None => () } @@ -4284,7 +4271,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { } } else { match this.resolve_module_path(root, - name_path[], + name_path.index(&FullRange), UseLexicalScope, span, PathSearch) { @@ -4322,7 +4309,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { let name_path = path.segments.iter().map(|seg| seg.identifier.name).collect::>(); // Look for a method in the current self type's impl module. - match get_module(self, path.span, name_path[]) { + match get_module(self, path.span, name_path.index(&FullRange)) { Some(module) => match module.children.borrow().get(&name) { Some(binding) => { let p_str = self.path_names_to_string(&path); @@ -4530,10 +4517,10 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { Some(definition) => self.record_def(expr.id, definition), result => { debug!("(resolving expression) didn't find struct \ - def: {}", result); + def: {:?}", result); let msg = format!("`{}` does not name a structure", self.path_names_to_string(path)); - self.resolve_error(path.span, msg[]); + self.resolve_error(path.span, msg.index(&FullRange)); } } @@ -4594,7 +4581,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { self.resolve_error( expr.span, format!("use of undeclared label `{}`", - token::get_ident(label))[]) + token::get_ident(label)).index(&FullRange)) } Some(DlDef(def @ DefLabel(_))) => { // Since this def is a label, it is never read. @@ -4717,23 +4704,23 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { } fn record_def(&mut self, node_id: NodeId, (def, lp): (Def, LastPrivate)) { - debug!("(recording def) recording {} for {}, last private {}", + debug!("(recording def) recording {:?} for {}, last private {:?}", def, node_id, lp); assert!(match lp {LastImport{..} => false, _ => true}, "Import should only be used for `use` directives"); self.last_private.insert(node_id, lp); - match self.def_map.borrow_mut().entry(&node_id) { + match self.def_map.borrow_mut().entry(node_id) { // Resolve appears to "resolve" the same ID multiple // times, so here is a sanity check it at least comes to // the same conclusion! 
- nmatsakis Occupied(entry) => if def != *entry.get() { self.session - .bug(format!("node_id {} resolved first to {} and \ - then {}", + .bug(format!("node_id {} resolved first to {:?} and \ + then {:?}", node_id, *entry.get(), - def)[]); + def).index(&FullRange)); }, Vacant(entry) => { entry.insert(def); }, } @@ -4749,7 +4736,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { self.resolve_error(pat.span, format!("cannot use `ref` binding mode \ with {}", - descr)[]); + descr).index(&FullRange)); } } } @@ -4785,7 +4772,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { return "???".to_string(); } self.names_to_string(names.into_iter().rev() - .collect::>()[]) + .collect::>().index(&FullRange)) } #[allow(dead_code)] // useful for debugging diff --git a/src/librustc_resolve/record_exports.rs b/src/librustc_resolve/record_exports.rs index 84fd3c936719b..67bcf152eb78c 100644 --- a/src/librustc_resolve/record_exports.rs +++ b/src/librustc_resolve/record_exports.rs @@ -117,7 +117,7 @@ impl<'a, 'b, 'tcx> ExportRecorder<'a, 'b, 'tcx> { ns: Namespace) { match namebindings.def_for_namespace(ns) { Some(d) => { - debug!("(computing exports) YES: export '{}' => {}", + debug!("(computing exports) YES: export '{}' => {:?}", name, d.def_id()); exports.push(Export { name: name, @@ -125,7 +125,7 @@ impl<'a, 'b, 'tcx> ExportRecorder<'a, 'b, 'tcx> { }); } d_opt => { - debug!("(computing exports) NO: {}", d_opt); + debug!("(computing exports) NO: {:?}", d_opt); } } } diff --git a/src/librustc_trans/back/link.rs b/src/librustc_trans/back/link.rs index 8fbeadc55b387..26241ace76f48 100644 --- a/src/librustc_trans/back/link.rs +++ b/src/librustc_trans/back/link.rs @@ -128,7 +128,7 @@ pub fn find_crate_name(sess: Option<&Session>, attrs: &[ast::Attribute], input: &Input) -> String { let validate = |&: s: String, span: Option| { - creader::validate_crate_name(sess, s[], span); + creader::validate_crate_name(sess, s.index(&FullRange), span); s }; @@ -146,7 +146,7 @@ pub fn find_crate_name(sess: Option<&Session>, let msg = format!("--crate-name and #[crate_name] are \ required to match, but `{}` != `{}`", s, name); - sess.span_err(attr.span, msg[]); + sess.span_err(attr.span, msg.index(&FullRange)); } } return validate(s.clone(), None); @@ -171,7 +171,7 @@ pub fn build_link_meta(sess: &Session, krate: &ast::Crate, crate_name: name, crate_hash: Svh::calculate(&sess.opts.cg.metadata, krate), }; - info!("{}", r); + info!("{:?}", r); return r; } @@ -192,17 +192,17 @@ fn symbol_hash<'tcx>(tcx: &ty::ctxt<'tcx>, // to be independent of one another in the crate. 
symbol_hasher.reset(); - symbol_hasher.input_str(link_meta.crate_name[]); + symbol_hasher.input_str(link_meta.crate_name.index(&FullRange)); symbol_hasher.input_str("-"); symbol_hasher.input_str(link_meta.crate_hash.as_str()); for meta in tcx.sess.crate_metadata.borrow().iter() { - symbol_hasher.input_str(meta[]); + symbol_hasher.input_str(meta.index(&FullRange)); } symbol_hasher.input_str("-"); - symbol_hasher.input_str(encoder::encoded_ty(tcx, t)[]); + symbol_hasher.input_str(encoder::encoded_ty(tcx, t).index(&FullRange)); // Prefix with 'h' so that it never blends into adjacent digits let mut hash = String::from_str("h"); - hash.push_str(truncated_hash_result(symbol_hasher)[]); + hash.push_str(truncated_hash_result(symbol_hasher).index(&FullRange)); hash } @@ -251,7 +251,7 @@ pub fn sanitize(s: &str) -> String { let mut tstr = String::new(); for c in c.escape_unicode() { tstr.push(c) } result.push('$'); - result.push_str(tstr[1..]); + result.push_str(tstr.index(&(1..))); } } } @@ -260,7 +260,7 @@ pub fn sanitize(s: &str) -> String { if result.len() > 0u && result.as_bytes()[0] != '_' as u8 && ! (result.as_bytes()[0] as char).is_xid_start() { - return format!("_{}", result[]); + return format!("_{}", result.index(&FullRange)); } return result; @@ -286,12 +286,12 @@ pub fn mangle>(mut path: PI, fn push(n: &mut String, s: &str) { let sani = sanitize(s); - n.push_str(format!("{}{}", sani.len(), sani)[]); + n.push_str(format!("{}{}", sani.len(), sani).index(&FullRange)); } // First, connect each component with pairs. for e in path { - push(&mut n, token::get_name(e.name()).get()[]) + push(&mut n, token::get_name(e.name()).get().index(&FullRange)) } match hash { @@ -329,17 +329,17 @@ pub fn mangle_exported_name<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, path: PathEl hash.push(EXTRA_CHARS.as_bytes()[extra2] as char); hash.push(EXTRA_CHARS.as_bytes()[extra3] as char); - exported_name(path, hash[]) + exported_name(path, hash.index(&FullRange)) } pub fn mangle_internal_name_by_type_and_seq<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>, name: &str) -> String { let s = ppaux::ty_to_string(ccx.tcx(), t); - let path = [PathName(token::intern(s[])), + let path = [PathName(token::intern(s.index(&FullRange))), gensym_name(name)]; let hash = get_symbol_hash(ccx, t); - mangle(ast_map::Values(path.iter()), Some(hash[])) + mangle(ast_map::Values(path.iter()), Some(hash.index(&FullRange))) } pub fn mangle_internal_name_by_path_and_seq(path: PathElems, flav: &str) -> String { @@ -359,7 +359,7 @@ pub fn remove(sess: &Session, path: &Path) { Err(e) => { sess.err(format!("failed to remove {}: {}", path.display(), - e)[]); + e).index(&FullRange)); } } } @@ -373,8 +373,8 @@ pub fn link_binary(sess: &Session, let mut out_filenames = Vec::new(); for &crate_type in sess.crate_types.borrow().iter() { if invalid_output_for_target(sess, crate_type) { - sess.bug(format!("invalid output type `{}` for target os `{}`", - crate_type, sess.opts.target_triple)[]); + sess.bug(format!("invalid output type `{:?}` for target os `{}`", + crate_type, sess.opts.target_triple).index(&FullRange)); } let out_file = link_binary_output(sess, trans, crate_type, outputs, crate_name); @@ -439,8 +439,8 @@ pub fn filename_for_input(sess: &Session, out_filename.with_filename(format!("lib{}.rlib", libname)) } config::CrateTypeDylib => { - let (prefix, suffix) = (sess.target.target.options.dll_prefix[], - sess.target.target.options.dll_suffix[]); + let (prefix, suffix) = (sess.target.target.options.dll_prefix.index(&FullRange), + 
sess.target.target.options.dll_suffix.index(&FullRange)); out_filename.with_filename(format!("{}{}{}", prefix, libname, @@ -450,7 +450,7 @@ pub fn filename_for_input(sess: &Session, out_filename.with_filename(format!("lib{}.a", libname)) } config::CrateTypeExecutable => { - let suffix = sess.target.target.options.exe_suffix[]; + let suffix = sess.target.target.options.exe_suffix.index(&FullRange); out_filename.with_filename(format!("{}{}", libname, suffix)) } } @@ -479,12 +479,12 @@ fn link_binary_output(sess: &Session, if !out_is_writeable { sess.fatal(format!("output file {} is not writeable -- check its \ permissions.", - out_filename.display())[]); + out_filename.display()).index(&FullRange)); } else if !obj_is_writeable { sess.fatal(format!("object file {} is not writeable -- check its \ permissions.", - obj_filename.display())[]); + obj_filename.display()).index(&FullRange)); } match crate_type { @@ -539,7 +539,7 @@ fn link_rlib<'a>(sess: &'a Session, for &(ref l, kind) in sess.cstore.get_used_libraries().borrow().iter() { match kind { cstore::NativeStatic => { - ab.add_native_library(l[]).unwrap(); + ab.add_native_library(l.index(&FullRange)).unwrap(); } cstore::NativeFramework | cstore::NativeUnknown => {} } @@ -587,12 +587,12 @@ fn link_rlib<'a>(sess: &'a Session, let tmpdir = TempDir::new("rustc").ok().expect("needs a temp dir"); let metadata = tmpdir.path().join(METADATA_FILENAME); match fs::File::create(&metadata).write(trans.metadata - []) { + .index(&FullRange)) { Ok(..) => {} Err(e) => { sess.err(format!("failed to write {}: {}", metadata.display(), - e)[]); + e).index(&FullRange)); sess.abort_if_errors(); } } @@ -608,27 +608,27 @@ fn link_rlib<'a>(sess: &'a Session, // extension to it. This is to work around a bug in LLDB that // would cause it to crash if the name of a file in an archive // was exactly 16 bytes. - let bc_filename = obj_filename.with_extension(format!("{}.bc", i)[]); + let bc_filename = obj_filename.with_extension(format!("{}.bc", i).as_slice()); let bc_deflated_filename = obj_filename.with_extension( - format!("{}.bytecode.deflate", i)[]); + format!("{}.bytecode.deflate", i).index(&FullRange)); let bc_data = match fs::File::open(&bc_filename).read_to_end() { Ok(buffer) => buffer, Err(e) => sess.fatal(format!("failed to read bytecode: {}", - e)[]) + e).index(&FullRange)) }; - let bc_data_deflated = match flate::deflate_bytes(bc_data[]) { + let bc_data_deflated = match flate::deflate_bytes(bc_data.index(&FullRange)) { Some(compressed) => compressed, None => sess.fatal(format!("failed to compress bytecode from {}", - bc_filename.display())[]) + bc_filename.display()).index(&FullRange)) }; let mut bc_file_deflated = match fs::File::create(&bc_deflated_filename) { Ok(file) => file, Err(e) => { sess.fatal(format!("failed to create compressed bytecode \ - file: {}", e)[]) + file: {}", e).index(&FullRange)) } }; @@ -637,7 +637,7 @@ fn link_rlib<'a>(sess: &'a Session, Ok(()) => {} Err(e) => { sess.err(format!("failed to write compressed bytecode: \ - {}", e)[]); + {}", e).index(&FullRange)); sess.abort_if_errors() } }; @@ -677,7 +677,7 @@ fn write_rlib_bytecode_object_v1(writer: &mut T, try! { writer.write(RLIB_BYTECODE_OBJECT_MAGIC) }; try! { writer.write_le_u32(1) }; try! { writer.write_le_u64(bc_data_deflated_size) }; - try! { writer.write(bc_data_deflated[]) }; + try! 
{ writer.write(bc_data_deflated.index(&FullRange)) }; let number_of_bytes_written_so_far = RLIB_BYTECODE_OBJECT_MAGIC.len() + // magic id @@ -728,11 +728,11 @@ fn link_staticlib(sess: &Session, obj_filename: &Path, out_filename: &Path) { let p = match *path { Some(ref p) => p.clone(), None => { sess.err(format!("could not find rlib for: `{}`", - name)[]); + name).index(&FullRange)); continue } }; - ab.add_rlib(&p, name[], sess.lto()).unwrap(); + ab.add_rlib(&p, name.index(&FullRange), sess.lto()).unwrap(); let native_libs = csearch::get_native_libraries(&sess.cstore, cnum); all_native_libs.extend(native_libs.into_iter()); @@ -754,7 +754,7 @@ fn link_staticlib(sess: &Session, obj_filename: &Path, out_filename: &Path) { cstore::NativeUnknown => "library", cstore::NativeFramework => "framework", }; - sess.note(format!("{}: {}", name, *lib)[]); + sess.note(format!("{}: {}", name, *lib).index(&FullRange)); } } @@ -768,12 +768,12 @@ fn link_natively(sess: &Session, trans: &CrateTranslation, dylib: bool, // The invocations of cc share some flags across platforms let pname = get_cc_prog(sess); - let mut cmd = Command::new(pname[]); + let mut cmd = Command::new(pname.index(&FullRange)); - cmd.args(sess.target.target.options.pre_link_args[]); + cmd.args(sess.target.target.options.pre_link_args.index(&FullRange)); link_args(&mut cmd, sess, dylib, tmpdir.path(), trans, obj_filename, out_filename); - cmd.args(sess.target.target.options.post_link_args[]); + cmd.args(sess.target.target.options.post_link_args.index(&FullRange)); if !sess.target.target.options.no_compiler_rt { cmd.arg("-lcompiler-rt"); } @@ -793,11 +793,11 @@ fn link_natively(sess: &Session, trans: &CrateTranslation, dylib: bool, if !prog.status.success() { sess.err(format!("linking with `{}` failed: {}", pname, - prog.status)[]); - sess.note(format!("{}", &cmd)[]); + prog.status).index(&FullRange)); + sess.note(format!("{}", &cmd).index(&FullRange)); let mut output = prog.error.clone(); - output.push_all(prog.output[]); - sess.note(str::from_utf8(output[]).unwrap()); + output.push_all(prog.output.index(&FullRange)); + sess.note(str::from_utf8(output.index(&FullRange)).unwrap()); sess.abort_if_errors(); } debug!("linker stderr:\n{}", String::from_utf8(prog.error).unwrap()); @@ -806,7 +806,7 @@ fn link_natively(sess: &Session, trans: &CrateTranslation, dylib: bool, Err(e) => { sess.err(format!("could not exec the linker `{}`: {}", pname, - e)[]); + e).index(&FullRange)); sess.abort_if_errors(); } } @@ -818,7 +818,7 @@ fn link_natively(sess: &Session, trans: &CrateTranslation, dylib: bool, match Command::new("dsymutil").arg(out_filename).output() { Ok(..) 
=> {} Err(e) => { - sess.err(format!("failed to run dsymutil: {}", e)[]); + sess.err(format!("failed to run dsymutil: {}", e).index(&FullRange)); sess.abort_if_errors(); } } @@ -867,7 +867,7 @@ fn link_args(cmd: &mut Command, let mut v = b"-Wl,-force_load,".to_vec(); v.push_all(morestack.as_vec()); - cmd.arg(v[]); + cmd.arg(v.index(&FullRange)); } else { cmd.args(&["-Wl,--whole-archive", "-lmorestack", "-Wl,--no-whole-archive"]); } @@ -992,7 +992,7 @@ fn link_args(cmd: &mut Command, if sess.opts.cg.rpath { let mut v = "-Wl,-install_name,@rpath/".as_bytes().to_vec(); v.push_all(out_filename.filename().unwrap()); - cmd.arg(v[]); + cmd.arg(v.index(&FullRange)); } } else { cmd.arg("-shared"); @@ -1004,7 +1004,7 @@ fn link_args(cmd: &mut Command, // addl_lib_search_paths if sess.opts.cg.rpath { let sysroot = sess.sysroot(); - let target_triple = sess.opts.target_triple[]; + let target_triple = sess.opts.target_triple.index(&FullRange); let get_install_prefix_lib_path = |:| { let install_prefix = option_env!("CFG_PREFIX").expect("CFG_PREFIX"); let tlib = filesearch::relative_target_lib_path(sysroot, target_triple); @@ -1021,14 +1021,14 @@ fn link_args(cmd: &mut Command, get_install_prefix_lib_path: get_install_prefix_lib_path, realpath: ::util::fs::realpath }; - cmd.args(rpath::get_rpath_flags(rpath_config)[]); + cmd.args(rpath::get_rpath_flags(rpath_config).index(&FullRange)); } // Finally add all the linker arguments provided on the command line along // with any #[link_args] attributes found inside the crate let empty = Vec::new(); - cmd.args(sess.opts.cg.link_args.as_ref().unwrap_or(&empty)[]); - cmd.args(used_link_args[]); + cmd.args(sess.opts.cg.link_args.as_ref().unwrap_or(&empty).index(&FullRange)); + cmd.args(used_link_args.index(&FullRange)); } // # Native library linking @@ -1082,14 +1082,14 @@ fn add_local_native_libraries(cmd: &mut Command, sess: &Session) { } else { // -force_load is the OSX equivalent of --whole-archive, but it // involves passing the full path to the library to link. - let lib = archive::find_library(l[], - sess.target.target.options.staticlib_prefix[], - sess.target.target.options.staticlib_suffix[], - search_path[], + let lib = archive::find_library(l.index(&FullRange), + sess.target.target.options.staticlib_prefix.as_slice(), + sess.target.target.options.staticlib_suffix.as_slice(), + search_path.index(&FullRange), &sess.diagnostic().handler); let mut v = b"-Wl,-force_load,".to_vec(); v.push_all(lib.as_vec()); - cmd.arg(v[]); + cmd.arg(v.index(&FullRange)); } } if takes_hints { @@ -1102,7 +1102,7 @@ fn add_local_native_libraries(cmd: &mut Command, sess: &Session) { cmd.arg(format!("-l{}", l)); } cstore::NativeFramework => { - cmd.arg("-framework").arg(l[]); + cmd.arg("-framework").arg(l.index(&FullRange)); } cstore::NativeStatic => unreachable!(), } @@ -1158,7 +1158,7 @@ fn add_upstream_rust_crates(cmd: &mut Command, sess: &Session, // Converts a library file-stem into a cc -l argument fn unlib<'a>(config: &config::Config, stem: &'a [u8]) -> &'a [u8] { if stem.starts_with("lib".as_bytes()) && !config.target.options.is_like_windows { - stem[3..] + stem.index(&(3..)) } else { stem } @@ -1183,9 +1183,9 @@ fn add_upstream_rust_crates(cmd: &mut Command, sess: &Session, // against the archive. 
if sess.lto() { let name = cratepath.filename_str().unwrap(); - let name = name[3..name.len() - 5]; // chop off lib/.rlib + let name = name.index(&(3..(name.len() - 5))); // chop off lib/.rlib time(sess.time_passes(), - format!("altering {}.rlib", name)[], + format!("altering {}.rlib", name).index(&FullRange), (), |()| { let dst = tmpdir.join(cratepath.filename().unwrap()); match fs::copy(&cratepath, &dst) { @@ -1194,7 +1194,7 @@ fn add_upstream_rust_crates(cmd: &mut Command, sess: &Session, sess.err(format!("failed to copy {} to {}: {}", cratepath.display(), dst.display(), - e)[]); + e).index(&FullRange)); sess.abort_if_errors(); } } @@ -1206,7 +1206,7 @@ fn add_upstream_rust_crates(cmd: &mut Command, sess: &Session, Err(e) => { sess.err(format!("failed to chmod {} when preparing \ for LTO: {}", dst.display(), - e)[]); + e).index(&FullRange)); sess.abort_if_errors(); } } @@ -1220,9 +1220,9 @@ fn add_upstream_rust_crates(cmd: &mut Command, sess: &Session, maybe_ar_prog: sess.opts.cg.ar.clone() }; let mut archive = Archive::open(config); - archive.remove_file(format!("{}.o", name)[]); + archive.remove_file(format!("{}.o", name).index(&FullRange)); let files = archive.files(); - if files.iter().any(|s| s[].ends_with(".o")) { + if files.iter().any(|s| s.index(&FullRange).ends_with(".o")) { cmd.arg(dst); } }); @@ -1244,7 +1244,7 @@ fn add_upstream_rust_crates(cmd: &mut Command, sess: &Session, let mut v = "-l".as_bytes().to_vec(); v.push_all(unlib(&sess.target, cratepath.filestem().unwrap())); - cmd.arg(v[]); + cmd.arg(v.index(&FullRange)); } } @@ -1286,7 +1286,7 @@ fn add_upstream_native_libraries(cmd: &mut Command, sess: &Session) { } cstore::NativeFramework => { cmd.arg("-framework"); - cmd.arg(lib[]); + cmd.arg(lib.index(&FullRange)); } cstore::NativeStatic => { sess.bug("statics shouldn't be propagated"); diff --git a/src/librustc_trans/back/lto.rs b/src/librustc_trans/back/lto.rs index f3e90c43a8414..ecf2e9ed72425 100644 --- a/src/librustc_trans/back/lto.rs +++ b/src/librustc_trans/back/lto.rs @@ -54,21 +54,21 @@ pub fn run(sess: &session::Session, llmod: ModuleRef, Some(p) => p, None => { sess.fatal(format!("could not find rlib for: `{}`", - name)[]); + name).index(&FullRange)); } }; let archive = ArchiveRO::open(&path).expect("wanted an rlib"); let file = path.filename_str().unwrap(); - let file = file[3..file.len() - 5]; // chop off lib/.rlib + let file = file.index(&(3..(file.len() - 5))); // chop off lib/.rlib debug!("reading {}", file); for i in iter::count(0u, 1) { let bc_encoded = time(sess.time_passes(), - format!("check for {}.{}.bytecode.deflate", name, i)[], + format!("check for {}.{}.bytecode.deflate", name, i).as_slice(), (), |_| { archive.read(format!("{}.{}.bytecode.deflate", - file, i)[]) + file, i).index(&FullRange)) }); let bc_encoded = match bc_encoded { Some(data) => data, @@ -76,7 +76,7 @@ pub fn run(sess: &session::Session, llmod: ModuleRef, if i == 0 { // No bitcode was found at all. sess.fatal(format!("missing compressed bytecode in {}", - path.display())[]); + path.display()).index(&FullRange)); } // No more bitcode files to read. break; @@ -91,20 +91,20 @@ pub fn run(sess: &session::Session, llmod: ModuleRef, if version == 1 { // The only version existing so far let data_size = extract_compressed_bytecode_size_v1(bc_encoded); - let compressed_data = bc_encoded[ + let compressed_data = bc_encoded.index(&( link::RLIB_BYTECODE_OBJECT_V1_DATA_OFFSET.. 
- link::RLIB_BYTECODE_OBJECT_V1_DATA_OFFSET + data_size as uint]; + (link::RLIB_BYTECODE_OBJECT_V1_DATA_OFFSET + data_size as uint))); match flate::inflate_bytes(compressed_data) { Some(inflated) => inflated, None => { sess.fatal(format!("failed to decompress bc of `{}`", - name)[]) + name).index(&FullRange)) } } } else { sess.fatal(format!("Unsupported bytecode format version {}", - version)[]) + version).index(&FullRange)) } }) } else { @@ -115,7 +115,7 @@ pub fn run(sess: &session::Session, llmod: ModuleRef, Some(bc) => bc, None => { sess.fatal(format!("failed to decompress bc of `{}`", - name)[]) + name).index(&FullRange)) } } }) @@ -124,7 +124,7 @@ pub fn run(sess: &session::Session, llmod: ModuleRef, let ptr = bc_decoded.as_slice().as_ptr(); debug!("linking {}, part {}", name, i); time(sess.time_passes(), - format!("ll link {}.{}", name, i)[], + format!("ll link {}.{}", name, i).index(&FullRange), (), |()| unsafe { if !llvm::LLVMRustLinkInExternalBitcode(llmod, @@ -132,7 +132,7 @@ pub fn run(sess: &session::Session, llmod: ModuleRef, bc_decoded.len() as libc::size_t) { write::llvm_err(sess.diagnostic().handler(), format!("failed to load bc of `{}`", - name[])); + name.index(&FullRange))); } }); } @@ -186,7 +186,7 @@ pub fn run(sess: &session::Session, llmod: ModuleRef, fn is_versioned_bytecode_format(bc: &[u8]) -> bool { let magic_id_byte_count = link::RLIB_BYTECODE_OBJECT_MAGIC.len(); return bc.len() > magic_id_byte_count && - bc[..magic_id_byte_count] == link::RLIB_BYTECODE_OBJECT_MAGIC; + bc.index(&(0..magic_id_byte_count)) == link::RLIB_BYTECODE_OBJECT_MAGIC; } fn extract_bytecode_format_version(bc: &[u8]) -> u32 { @@ -198,8 +198,8 @@ fn extract_compressed_bytecode_size_v1(bc: &[u8]) -> u64 { } fn read_from_le_bytes(bytes: &[u8], position_in_bytes: uint) -> T { - let byte_data = bytes[position_in_bytes.. - position_in_bytes + mem::size_of::()]; + let byte_data = bytes.index(&(position_in_bytes.. + (position_in_bytes + mem::size_of::()))); let data = unsafe { *(byte_data.as_ptr() as *const T) }; diff --git a/src/librustc_trans/back/write.rs b/src/librustc_trans/back/write.rs index 98e2b4b9dddb5..8a80019143ea3 100644 --- a/src/librustc_trans/back/write.rs +++ b/src/librustc_trans/back/write.rs @@ -47,14 +47,14 @@ pub fn llvm_err(handler: &diagnostic::Handler, msg: String) -> ! 
{ unsafe { let cstr = llvm::LLVMRustGetLastError(); if cstr == ptr::null() { - handler.fatal(msg[]); + handler.fatal(msg.index(&FullRange)); } else { let err = ffi::c_str_to_bytes(&cstr); let err = String::from_utf8_lossy(err.as_slice()).to_string(); libc::free(cstr as *mut _); handler.fatal(format!("{}: {}", - msg[], - err[])[]); + msg.index(&FullRange), + err.index(&FullRange)).index(&FullRange)); } } } @@ -104,13 +104,13 @@ impl SharedEmitter { match diag.code { Some(ref code) => { handler.emit_with_code(None, - diag.msg[], - code[], + diag.msg.index(&FullRange), + code.index(&FullRange), diag.lvl); }, None => { handler.emit(None, - diag.msg[], + diag.msg.index(&FullRange), diag.lvl); }, } @@ -165,8 +165,8 @@ fn get_llvm_opt_level(optimize: config::OptLevel) -> llvm::CodeGenOptLevel { fn create_target_machine(sess: &Session) -> TargetMachineRef { let reloc_model_arg = match sess.opts.cg.relocation_model { - Some(ref s) => s[], - None => sess.target.target.options.relocation_model[] + Some(ref s) => s.index(&FullRange), + None => sess.target.target.options.relocation_model.index(&FullRange) }; let reloc_model = match reloc_model_arg { "pic" => llvm::RelocPIC, @@ -174,10 +174,10 @@ fn create_target_machine(sess: &Session) -> TargetMachineRef { "default" => llvm::RelocDefault, "dynamic-no-pic" => llvm::RelocDynamicNoPic, _ => { - sess.err(format!("{} is not a valid relocation mode", + sess.err(format!("{:?} is not a valid relocation mode", sess.opts .cg - .relocation_model)[]); + .relocation_model).index(&FullRange)); sess.abort_if_errors(); unreachable!(); } @@ -198,8 +198,8 @@ fn create_target_machine(sess: &Session) -> TargetMachineRef { let fdata_sections = ffunction_sections; let code_model_arg = match sess.opts.cg.code_model { - Some(ref s) => s[], - None => sess.target.target.options.code_model[] + Some(ref s) => s.index(&FullRange), + None => sess.target.target.options.code_model.index(&FullRange) }; let code_model = match code_model_arg { @@ -209,16 +209,16 @@ fn create_target_machine(sess: &Session) -> TargetMachineRef { "medium" => llvm::CodeModelMedium, "large" => llvm::CodeModelLarge, _ => { - sess.err(format!("{} is not a valid code model", + sess.err(format!("{:?} is not a valid code model", sess.opts .cg - .code_model)[]); + .code_model).index(&FullRange)); sess.abort_if_errors(); unreachable!(); } }; - let triple = sess.target.target.llvm_target[]; + let triple = sess.target.target.llvm_target.index(&FullRange); let tm = unsafe { let triple = CString::from_slice(triple.as_bytes()); @@ -350,13 +350,13 @@ unsafe extern "C" fn inline_asm_handler(diag: SMDiagnosticRef, match cgcx.lto_ctxt { Some((sess, _)) => { sess.codemap().with_expn_info(ExpnId::from_llvm_cookie(cookie), |info| match info { - Some(ei) => sess.span_err(ei.call_site, msg[]), - None => sess.err(msg[]), + Some(ei) => sess.span_err(ei.call_site, msg.index(&FullRange)), + None => sess.err(msg.index(&FullRange)), }); } None => { - cgcx.handler.err(msg[]); + cgcx.handler.err(msg.index(&FullRange)); cgcx.handler.note("build without -C codegen-units for more exact errors"); } } @@ -381,8 +381,8 @@ unsafe extern "C" fn diagnostic_handler(info: DiagnosticInfoRef, user: *mut c_vo cgcx.handler.note(format!("optimization {} for {} at {}: {}", opt.kind.describe(), pass_name, - if loc.is_empty() { "[unknown]" } else { loc[] }, - llvm::twine_to_string(opt.message))[]); + if loc.is_empty() { "[unknown]" } else { loc.as_slice() }, + llvm::twine_to_string(opt.message)).as_slice()); } } @@ -446,7 +446,7 @@ unsafe fn 
optimize_and_codegen(cgcx: &CodegenContext, for pass in config.passes.iter() { let pass = CString::from_slice(pass.as_bytes()); if !llvm::LLVMRustAddPass(mpm, pass.as_ptr()) { - cgcx.handler.warn(format!("unknown pass {}, ignoring", + cgcx.handler.warn(format!("unknown pass {:?}, ignoring", pass).as_slice()); } } @@ -518,14 +518,14 @@ unsafe fn optimize_and_codegen(cgcx: &CodegenContext, } if config.emit_asm { - let path = output_names.with_extension(format!("{}.s", name_extra)[]); + let path = output_names.with_extension(format!("{}.s", name_extra).index(&FullRange)); with_codegen(tm, llmod, config.no_builtins, |cpm| { write_output_file(cgcx.handler, tm, cpm, llmod, &path, llvm::AssemblyFileType); }); } if config.emit_obj { - let path = output_names.with_extension(format!("{}.o", name_extra)[]); + let path = output_names.with_extension(format!("{}.o", name_extra).index(&FullRange)); with_codegen(tm, llmod, config.no_builtins, |cpm| { write_output_file(cgcx.handler, tm, cpm, llmod, &path, llvm::ObjectFileType); }); @@ -639,7 +639,7 @@ pub fn run_passes(sess: &Session, // Process the work items, optionally using worker threads. if sess.opts.cg.codegen_units == 1 { - run_work_singlethreaded(sess, trans.reachable[], work_items); + run_work_singlethreaded(sess, trans.reachable.index(&FullRange), work_items); } else { run_work_multithreaded(sess, work_items, sess.opts.cg.codegen_units); } @@ -667,7 +667,7 @@ pub fn run_passes(sess: &Session, // 2) Multiple codegen units, with `-o some_name`. We have // no good solution for this case, so warn the user. sess.warn(format!("ignoring -o because multiple .{} files were produced", - ext)[]); + ext).index(&FullRange)); } else { // 3) Multiple codegen units, but no `-o some_name`. We // just leave the `foo.0.x` files in place. 
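(Aside, for orientation: the hunks in this patch apply two mechanical rewrites. The deprecated full-range slicing sugar `expr[]` becomes an explicit `expr.index(&FullRange)` call, and format arguments that only implement the debug-style formatting trait move from `{}` to `{:?}`. The sketch below is illustrative present-day Rust, not code from the patch; it assumes the reader wants the modern equivalents, where `FullRange` was later renamed `std::ops::RangeFull`, the `expr[]` sugar became `&expr[..]`, and the debug/display formatting traits became `Debug` and `Display`.)

    // Minimal sketch, in present-day Rust, of the two patterns rewritten throughout this patch.
    use std::ops::{Index, RangeFull};

    fn main() {
        let msg = String::from("link failed");

        // What `msg[]` used to desugar to: an explicit Index call over the full range
        // (spelled `msg.index(&FullRange)` in the pre-1.0 code in these hunks).
        let explicit: &str = Index::index(&msg, RangeFull);
        assert_eq!(explicit, &msg[..]);

        // `{}` requires the display-style trait and `{:?}` the debug-style one, which is
        // why operands such as Options and process statuses move to `{:?}` in these hunks.
        let status: Option<i32> = Some(101);
        println!("linking with `{}` failed: {:?}", explicit, status);
    }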
@@ -700,20 +700,20 @@ pub fn run_passes(sess: &Session, }; let pname = get_cc_prog(sess); - let mut cmd = Command::new(pname[]); + let mut cmd = Command::new(pname.index(&FullRange)); - cmd.args(sess.target.target.options.pre_link_args[]); + cmd.args(sess.target.target.options.pre_link_args.index(&FullRange)); cmd.arg("-nostdlib"); for index in range(0, trans.modules.len()) { - cmd.arg(crate_output.with_extension(format!("{}.o", index)[])); + cmd.arg(crate_output.with_extension(format!("{}.o", index).index(&FullRange))); } cmd.arg("-r") .arg("-o") .arg(windows_output_path.as_ref().unwrap_or(output_path)); - cmd.args(sess.target.target.options.post_link_args[]); + cmd.args(sess.target.target.options.post_link_args.index(&FullRange)); if (sess.opts.debugging_opts & config::PRINT_LINK_ARGS) != 0 { println!("{}", &cmd); @@ -726,14 +726,14 @@ pub fn run_passes(sess: &Session, Ok(status) => { if !status.success() { sess.err(format!("linking of {} with `{}` failed", - output_path.display(), cmd)[]); + output_path.display(), cmd).index(&FullRange)); sess.abort_if_errors(); } }, Err(e) => { sess.err(format!("could not exec the linker `{}`: {}", pname, - e)[]); + e).index(&FullRange)); sess.abort_if_errors(); }, } @@ -818,12 +818,12 @@ pub fn run_passes(sess: &Session, for i in range(0, trans.modules.len()) { if modules_config.emit_obj { let ext = format!("{}.o", i); - remove(sess, &crate_output.with_extension(ext[])); + remove(sess, &crate_output.with_extension(ext.index(&FullRange))); } if modules_config.emit_bc && !keep_numbered_bitcode { let ext = format!("{}.bc", i); - remove(sess, &crate_output.with_extension(ext[])); + remove(sess, &crate_output.with_extension(ext.index(&FullRange))); } } @@ -928,7 +928,7 @@ fn run_work_multithreaded(sess: &Session, } tx.take().unwrap().send(()).unwrap(); - }).detach(); + }); } let mut panicked = false; @@ -949,7 +949,7 @@ fn run_work_multithreaded(sess: &Session, pub fn run_assembler(sess: &Session, outputs: &OutputFilenames) { let pname = get_cc_prog(sess); - let mut cmd = Command::new(pname[]); + let mut cmd = Command::new(pname.index(&FullRange)); cmd.arg("-c").arg("-o").arg(outputs.path(config::OutputTypeObject)) .arg(outputs.temp_path(config::OutputTypeAssembly)); @@ -960,18 +960,18 @@ pub fn run_assembler(sess: &Session, outputs: &OutputFilenames) { if !prog.status.success() { sess.err(format!("linking with `{}` failed: {}", pname, - prog.status)[]); - sess.note(format!("{}", &cmd)[]); + prog.status).index(&FullRange)); + sess.note(format!("{}", &cmd).index(&FullRange)); let mut note = prog.error.clone(); - note.push_all(prog.output[]); - sess.note(str::from_utf8(note[]).unwrap()); + note.push_all(prog.output.index(&FullRange)); + sess.note(str::from_utf8(note.index(&FullRange)).unwrap()); sess.abort_if_errors(); } }, Err(e) => { sess.err(format!("could not exec the linker `{}`: {}", pname, - e)[]); + e).index(&FullRange)); sess.abort_if_errors(); } } @@ -1004,7 +1004,7 @@ unsafe fn configure_llvm(sess: &Session) { if sess.print_llvm_passes() { add("-debug-pass=Structure"); } for arg in sess.opts.cg.llvm_args.iter() { - add((*arg)[]); + add((*arg).index(&FullRange)); } } diff --git a/src/librustc_trans/lib.rs b/src/librustc_trans/lib.rs index 705fecf4d198e..b6f90a4c2f52a 100644 --- a/src/librustc_trans/lib.rs +++ b/src/librustc_trans/lib.rs @@ -22,13 +22,9 @@ html_favicon_url = "http://www.rust-lang.org/favicon.ico", html_root_url = "http://doc.rust-lang.org/nightly/")] -#![allow(unknown_features)] -#![feature(default_type_params, globs, macro_rules, 
phase, quote)] +#![feature(quote)] #![feature(slicing_syntax, unsafe_destructor)] #![feature(rustc_diagnostic_macros)] -#![feature(unboxed_closures)] -#![feature(old_orphan_check)] -#![feature(associated_types)] extern crate arena; extern crate flate; @@ -40,21 +36,8 @@ extern crate rustc_back; extern crate serialize; extern crate "rustc_llvm" as llvm; -#[cfg(stage0)] -#[phase(plugin, link)] -extern crate log; - -#[cfg(not(stage0))] -#[macro_use] -extern crate log; - -#[cfg(stage0)] -#[phase(plugin, link)] -extern crate syntax; - -#[cfg(not(stage0))] -#[macro_use] -extern crate syntax; +#[macro_use] extern crate log; +#[macro_use] extern crate syntax; pub use rustc::session; pub use rustc::metadata; diff --git a/src/librustc_trans/save/mod.rs b/src/librustc_trans/save/mod.rs index 8e6276b61f949..35f168f092a70 100644 --- a/src/librustc_trans/save/mod.rs +++ b/src/librustc_trans/save/mod.rs @@ -94,7 +94,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { // dump info about all the external crates referenced from this crate self.sess.cstore.iter_crate_data(|n, cmd| { - self.fmt.external_crate_str(krate.span, cmd.name[], n); + self.fmt.external_crate_str(krate.span, cmd.name.index(&FullRange), n); }); self.fmt.recorder.record("end_external_crates\n"); } @@ -143,7 +143,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { for &(ref span, ref qualname) in sub_paths.iter() { self.fmt.sub_mod_ref_str(path.span, *span, - qualname[], + qualname.index(&FullRange), self.cur_scope); } } @@ -161,7 +161,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { for &(ref span, ref qualname) in sub_paths.iter() { self.fmt.sub_mod_ref_str(path.span, *span, - qualname[], + qualname.index(&FullRange), self.cur_scope); } } @@ -180,17 +180,17 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { let (ref span, ref qualname) = sub_paths[len-2]; self.fmt.sub_type_ref_str(path.span, *span, - qualname[]); + qualname.index(&FullRange)); // write the other sub-paths if len <= 2 { return; } - let sub_paths = sub_paths[..len-2]; + let sub_paths = sub_paths.index(&(0..(len-2))); for &(ref span, ref qualname) in sub_paths.iter() { self.fmt.sub_mod_ref_str(path.span, *span, - qualname[], + qualname.index(&FullRange), self.cur_scope); } } @@ -199,7 +199,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { fn lookup_type_ref(&self, ref_id: NodeId) -> Option { if !self.analysis.ty_cx.def_map.borrow().contains_key(&ref_id) { self.sess.bug(format!("def_map has no key for {} in lookup_type_ref", - ref_id)[]); + ref_id).index(&FullRange)); } let def = (*self.analysis.ty_cx.def_map.borrow())[ref_id]; match def { @@ -212,7 +212,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { let def_map = self.analysis.ty_cx.def_map.borrow(); if !def_map.contains_key(&ref_id) { self.sess.span_bug(span, format!("def_map has no key for {} in lookup_def_kind", - ref_id)[]); + ref_id).index(&FullRange)); } let def = (*def_map)[ref_id]; match def { @@ -240,8 +240,8 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { def::DefUse(_) | def::DefMethod(..) 
| def::DefPrimTy(_) => { - self.sess.span_bug(span, format!("lookup_def_kind for unexpected item: {}", - def)[]); + self.sess.span_bug(span, format!("lookup_def_kind for unexpected item: {:?}", + def).index(&FullRange)); }, } } @@ -262,8 +262,8 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { span_utils.span_for_last_ident(p.span), id, qualname, - path_to_string(p)[], - typ[]); + path_to_string(p).index(&FullRange), + typ.index(&FullRange)); } self.collected_paths.clear(); } @@ -285,14 +285,14 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { match item.node { ast::ItemImpl(_, _, _, _, ref ty, _) => { let mut result = String::from_str("<"); - result.push_str(ty_to_string(&**ty)[]); + result.push_str(ty_to_string(&**ty).index(&FullRange)); match ty::trait_of_item(&self.analysis.ty_cx, ast_util::local_def(method.id)) { Some(def_id) => { result.push_str(" as "); result.push_str( - ty::item_path_str(&self.analysis.ty_cx, def_id)[]); + ty::item_path_str(&self.analysis.ty_cx, def_id).as_slice()); }, None => {} } @@ -302,17 +302,17 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { _ => { self.sess.span_bug(method.span, format!("Container {} for method {} not an impl?", - impl_id.node, method.id)[]); + impl_id.node, method.id).index(&FullRange)); }, } }, _ => { self.sess.span_bug(method.span, - format!("Container {} for method {} is not a node item {}", + format!("Container {} for method {} is not a node item {:?}", impl_id.node, method.id, self.analysis.ty_cx.map.get(impl_id.node) - )[]); + ).index(&FullRange)); }, }, None => match ty::trait_of_item(&self.analysis.ty_cx, @@ -328,20 +328,20 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { _ => { self.sess.span_bug(method.span, format!("Could not find container {} for method {}", - def_id.node, method.id)[]); + def_id.node, method.id).index(&FullRange)); } } }, None => { self.sess.span_bug(method.span, format!("Could not find container for method {}", - method.id)[]); + method.id).index(&FullRange)); }, }, }; qualname.push_str(get_ident(method.pe_ident()).get()); - let qualname = qualname[]; + let qualname = qualname.index(&FullRange); // record the decl for this def (if it has one) let decl_id = ty::trait_item_of_item(&self.analysis.ty_cx, @@ -430,13 +430,13 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { Some(sub_span) => self.fmt.field_str(field.span, Some(sub_span), field.node.id, - name.get()[], - qualname[], - typ[], + name.get().index(&FullRange), + qualname.index(&FullRange), + typ.index(&FullRange), scope_id), None => self.sess.span_bug(field.span, format!("Could not find sub-span for field {}", - qualname)[]), + qualname).index(&FullRange)), } }, _ => (), @@ -463,7 +463,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { self.fmt.typedef_str(full_span, Some(*param_ss), param.id, - name[], + name.index(&FullRange), ""); } self.visit_generics(generics); @@ -480,10 +480,10 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { self.fmt.fn_str(item.span, sub_span, item.id, - qualname[], + qualname.index(&FullRange), self.cur_scope); - self.process_formals(&decl.inputs, qualname[]); + self.process_formals(&decl.inputs, qualname.index(&FullRange)); // walk arg and return types for arg in decl.inputs.iter() { @@ -497,7 +497,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { // walk the body self.nest(item.id, |v| v.visit_block(&*body)); - self.process_generic_params(ty_params, item.span, qualname[], item.id); + self.process_generic_params(ty_params, item.span, qualname.index(&FullRange), item.id); } fn process_static(&mut self, @@ -519,9 +519,9 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { sub_span, item.id, 
get_ident(item.ident).get(), - qualname[], - value[], - ty_to_string(&*typ)[], + qualname.index(&FullRange), + value.index(&FullRange), + ty_to_string(&*typ).index(&FullRange), self.cur_scope); // walk type and init value @@ -542,9 +542,9 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { sub_span, item.id, get_ident(item.ident).get(), - qualname[], + qualname.index(&FullRange), "", - ty_to_string(&*typ)[], + ty_to_string(&*typ).index(&FullRange), self.cur_scope); // walk type and init value @@ -568,17 +568,17 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { sub_span, item.id, ctor_id, - qualname[], + qualname.index(&FullRange), self.cur_scope, - val[]); + val.index(&FullRange)); // fields for field in def.fields.iter() { - self.process_struct_field_def(field, qualname[], item.id); + self.process_struct_field_def(field, qualname.index(&FullRange), item.id); self.visit_ty(&*field.node.ty); } - self.process_generic_params(ty_params, item.span, qualname[], item.id); + self.process_generic_params(ty_params, item.span, qualname.index(&FullRange), item.id); } fn process_enum(&mut self, @@ -591,12 +591,12 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { Some(sub_span) => self.fmt.enum_str(item.span, Some(sub_span), item.id, - enum_name[], + enum_name.index(&FullRange), self.cur_scope, - val[]), + val.index(&FullRange)), None => self.sess.span_bug(item.span, format!("Could not find subspan for enum {}", - enum_name)[]), + enum_name).index(&FullRange)), } for variant in enum_definition.variants.iter() { let name = get_ident(variant.node.name); @@ -612,9 +612,9 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { self.span.span_for_first_ident(variant.span), variant.node.id, name, - qualname[], - enum_name[], - val[], + qualname.index(&FullRange), + enum_name.index(&FullRange), + val.index(&FullRange), item.id); for arg in args.iter() { self.visit_ty(&*arg.ty); @@ -630,20 +630,20 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { self.span.span_for_first_ident(variant.span), variant.node.id, ctor_id, - qualname[], - enum_name[], - val[], + qualname.index(&FullRange), + enum_name.index(&FullRange), + val.index(&FullRange), item.id); for field in struct_def.fields.iter() { - self.process_struct_field_def(field, qualname[], variant.node.id); + self.process_struct_field_def(field, qualname.as_slice(), variant.node.id); self.visit_ty(&*field.node.ty); } } } } - self.process_generic_params(ty_params, item.span, enum_name[], item.id); + self.process_generic_params(ty_params, item.span, enum_name.index(&FullRange), item.id); } fn process_impl(&mut self, @@ -703,9 +703,9 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { self.fmt.trait_str(item.span, sub_span, item.id, - qualname[], + qualname.index(&FullRange), self.cur_scope, - val[]); + val.index(&FullRange)); // super-traits for super_bound in trait_refs.iter() { @@ -737,7 +737,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { } // walk generics and methods - self.process_generic_params(generics, item.span, qualname[], item.id); + self.process_generic_params(generics, item.span, qualname.index(&FullRange), item.id); for method in methods.iter() { self.visit_trait_item(method) } @@ -755,9 +755,9 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { self.fmt.mod_str(item.span, sub_span, item.id, - qualname[], + qualname.index(&FullRange), self.cur_scope, - filename[]); + filename.index(&FullRange)); self.nest(item.id, |v| visit::walk_mod(v, m)); } @@ -774,7 +774,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { let def_map = self.analysis.ty_cx.def_map.borrow(); if !def_map.contains_key(&id) { self.sess.span_bug(span, - 
format!("def_map has no key for {} in visit_expr", id)[]); + format!("def_map has no key for {} in visit_expr", id).as_slice()); } let def = &(*def_map)[id]; let sub_span = self.span.span_for_last_ident(span); @@ -841,7 +841,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { self.cur_scope), _ => self.sess.span_bug(span, format!("Unexpected def kind while looking up path in '{}'", - self.span.snippet(span))[]), + self.span.snippet(span)).index(&FullRange)), } // modules or types in the path prefix match *def { @@ -959,7 +959,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { self.cur_scope); // walk receiver and args - visit::walk_exprs(self, args[]); + visit::walk_exprs(self, args.index(&FullRange)); } fn process_pat(&mut self, p:&ast::Pat) { @@ -976,7 +976,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { None => { self.sess.span_bug(p.span, format!("Could not find struct_def for `{}`", - self.span.snippet(p.span))[]); + self.span.snippet(p.span)).index(&FullRange)); } }; for &Spanned { node: ref field, span } in fields.iter() { @@ -1061,11 +1061,11 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> { self.fmt.typedef_str(item.span, sub_span, item.id, - qualname[], - value[]); + qualname.index(&FullRange), + value.index(&FullRange)); self.visit_ty(&**ty); - self.process_generic_params(ty_params, item.span, qualname[], item.id); + self.process_generic_params(ty_params, item.span, qualname.as_slice(), item.id); }, ast::ItemMac(_) => (), _ => visit::walk_item(self, item), @@ -1122,12 +1122,12 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> { None => { self.sess.span_bug(method_type.span, format!("Could not find trait for method {}", - method_type.id)[]); + method_type.id).index(&FullRange)); }, }; qualname.push_str(get_ident(method_type.ident).get()); - let qualname = qualname[]; + let qualname = qualname.index(&FullRange); let sub_span = self.span.sub_span_after_keyword(method_type.span, keywords::Fn); self.fmt.method_decl_str(method_type.span, @@ -1262,7 +1262,7 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> { id, cnum, name, - s[], + s.index(&FullRange), self.cur_scope); }, } @@ -1371,8 +1371,8 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> { } let mut id = String::from_str("$"); - id.push_str(ex.id.to_string()[]); - self.process_formals(&decl.inputs, id[]); + id.push_str(ex.id.to_string().index(&FullRange)); + self.process_formals(&decl.inputs, id.index(&FullRange)); // walk arg and return types for arg in decl.inputs.iter() { @@ -1418,7 +1418,8 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> { let def_map = self.analysis.ty_cx.def_map.borrow(); if !def_map.contains_key(&id) { self.sess.span_bug(p.span, - format!("def_map has no key for {} in visit_arm", id)[]); + format!("def_map has no key for {} in visit_arm", + id).index(&FullRange)); } let def = &(*def_map)[id]; match *def { @@ -1433,8 +1434,8 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> { self.fmt.variable_str(p.span, Some(p.span), id, - path_to_string(p)[], - value[], + path_to_string(p).index(&FullRange), + value.index(&FullRange), "") } def::DefVariant(..) => { @@ -1443,7 +1444,8 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> { // FIXME(nrc) what are these doing here? def::DefStatic(_, _) => {} def::DefConst(..) 
=> {} - _ => error!("unexpected definition kind when processing collected paths: {}", *def) + _ => error!("unexpected definition kind when processing collected paths: {:?}", + *def) } } for &(id, span, ref path, ref_kind) in paths_to_process.iter() { @@ -1488,9 +1490,9 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> { self.fmt.variable_str(p.span, sub_span, id, - path_to_string(p)[], - value[], - typ[]); + path_to_string(p).index(&FullRange), + value.index(&FullRange), + typ.index(&FullRange)); } self.collected_paths.clear(); @@ -1509,7 +1511,7 @@ pub fn process_crate(sess: &Session, } assert!(analysis.glob_map.is_some()); - let cratename = match attr::find_crate_name(krate.attrs[]) { + let cratename = match attr::find_crate_name(krate.attrs.index(&FullRange)) { Some(name) => name.get().to_string(), None => { info!("Could not find crate name, using 'unknown_crate'"); @@ -1530,7 +1532,7 @@ pub fn process_crate(sess: &Session, match fs::mkdir_recursive(&root_path, io::USER_RWX) { Err(e) => sess.err(format!("Could not create directory {}: {}", - root_path.display(), e)[]), + root_path.display(), e).index(&FullRange)), _ => (), } @@ -1547,7 +1549,7 @@ pub fn process_crate(sess: &Session, Ok(f) => box f, Err(e) => { let disp = root_path.display(); - sess.fatal(format!("Could not open {}: {}", disp, e)[]); + sess.fatal(format!("Could not open {}: {}", disp, e).index(&FullRange)); } }; root_path.pop(); @@ -1573,7 +1575,7 @@ pub fn process_crate(sess: &Session, cur_scope: 0 }; - visitor.dump_crate_info(cratename[], krate); + visitor.dump_crate_info(cratename.index(&FullRange), krate); visit::walk_crate(&mut visitor, krate); } diff --git a/src/librustc_trans/save/recorder.rs b/src/librustc_trans/save/recorder.rs index 679a8d2d07bc8..bb0fb38700208 100644 --- a/src/librustc_trans/save/recorder.rs +++ b/src/librustc_trans/save/recorder.rs @@ -41,7 +41,7 @@ impl Recorder { assert!(self.dump_spans); let result = format!("span,kind,{},{},text,\"{}\"\n", kind, su.extent_str(span), escape(su.snippet(span))); - self.record(result[]); + self.record(result.index(&FullRange)); } } @@ -160,15 +160,15 @@ impl<'a> FmtStrs<'a> { if values.len() != fields.len() { self.span.sess.span_bug(span, format!( "Mismatch between length of fields for '{}', expected '{}', found '{}'", - kind, fields.len(), values.len())[]); + kind, fields.len(), values.len()).index(&FullRange)); } let values = values.iter().map(|s| { // Never take more than 1020 chars if s.len() > 1020 { - s[..1020] + s.index(&(0..1020)) } else { - s[] + s.index(&FullRange) } }); @@ -184,7 +184,7 @@ impl<'a> FmtStrs<'a> { } ))); Some(strs.fold(String::new(), |mut s, ss| { - s.push_str(ss[]); + s.push_str(ss.index(&FullRange)); s })) } @@ -198,7 +198,7 @@ impl<'a> FmtStrs<'a> { if needs_span { self.span.sess.span_bug(span, format!( "Called record_without_span for '{}' which does requires a span", - label)[]); + label).index(&FullRange)); } assert!(!dump_spans); @@ -212,9 +212,9 @@ impl<'a> FmtStrs<'a> { }; let mut result = String::from_str(label); - result.push_str(values_str[]); + result.push_str(values_str.index(&FullRange)); result.push_str("\n"); - self.recorder.record(result[]); + self.recorder.record(result.index(&FullRange)); } pub fn record_with_span(&mut self, @@ -237,7 +237,7 @@ impl<'a> FmtStrs<'a> { if !needs_span { self.span.sess.span_bug(span, format!("Called record_with_span for '{}' \ - which does not require a span", label)[]); + which does not require a span", label).as_slice()); } let values_str = match self.make_values_str(label, 
fields, values, span) { @@ -245,7 +245,7 @@ impl<'a> FmtStrs<'a> { None => return, }; let result = format!("{},{}{}\n", label, self.span.extent_str(sub_span), values_str); - self.recorder.record(result[]); + self.recorder.record(result.index(&FullRange)); } pub fn check_and_record(&mut self, @@ -275,7 +275,7 @@ impl<'a> FmtStrs<'a> { // variable def's node id let mut qualname = String::from_str(name); qualname.push_str("$"); - qualname.push_str(id.to_string()[]); + qualname.push_str(id.to_string().index(&FullRange)); self.check_and_record(Variable, span, sub_span, diff --git a/src/librustc_trans/save/span_utils.rs b/src/librustc_trans/save/span_utils.rs index 14c6475c87df9..8d249b8bfe903 100644 --- a/src/librustc_trans/save/span_utils.rs +++ b/src/librustc_trans/save/span_utils.rs @@ -218,7 +218,7 @@ impl<'a> SpanUtils<'a> { let loc = self.sess.codemap().lookup_char_pos(span.lo); self.sess.span_bug(span, format!("Mis-counted brackets when breaking path? Parsing '{}' in {}, line {}", - self.snippet(span), loc.file.name, loc.line)[]); + self.snippet(span), loc.file.name, loc.line).index(&FullRange)); } if result.is_none() && prev.tok.is_ident() && bracket_count == 0 { return self.make_sub_span(span, Some(prev.sp)); @@ -244,7 +244,7 @@ impl<'a> SpanUtils<'a> { let loc = self.sess.codemap().lookup_char_pos(span.lo); self.sess.span_bug(span, format!( "Mis-counted brackets when breaking path? Parsing '{}' in {}, line {}", - self.snippet(span), loc.file.name, loc.line)[]); + self.snippet(span), loc.file.name, loc.line).index(&FullRange)); } return result } diff --git a/src/librustc_trans/trans/_match.rs b/src/librustc_trans/trans/_match.rs index fed0931cab71d..49b9ef5a40ade 100644 --- a/src/librustc_trans/trans/_match.rs +++ b/src/librustc_trans/trans/_match.rs @@ -427,7 +427,7 @@ fn enter_match<'a, 'b, 'p, 'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>, let _indenter = indenter(); m.iter().filter_map(|br| { - e(br.pats[]).map(|pats| { + e(br.pats.index(&FullRange)).map(|pats| { let this = br.pats[col]; let mut bound_ptrs = br.bound_ptrs.clone(); match this.node { @@ -471,8 +471,8 @@ fn enter_default<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>, // Collect all of the matches that can match against anything. 
enter_match(bcx, dm, m, col, val, |pats| { if pat_is_binding_or_wild(dm, &*pats[col]) { - let mut r = pats[..col].to_vec(); - r.push_all(pats[col + 1..]); + let mut r = pats.index(&(0..col)).to_vec(); + r.push_all(pats.index(&((col + 1)..))); Some(r) } else { None @@ -518,7 +518,7 @@ fn enter_opt<'a, 'p, 'blk, 'tcx>( variant_size: uint, val: ValueRef) -> Vec> { - debug!("enter_opt(bcx={}, m={}, opt={}, col={}, val={})", + debug!("enter_opt(bcx={}, m={}, opt={:?}, col={}, val={})", bcx.to_str(), m.repr(bcx.tcx()), *opt, @@ -548,7 +548,7 @@ fn enter_opt<'a, 'p, 'blk, 'tcx>( param_env: param_env, }; enter_match(bcx, dm, m, col, val, |pats| - check_match::specialize(&mcx, pats[], &ctor, col, variant_size) + check_match::specialize(&mcx, pats.index(&FullRange), &ctor, col, variant_size) ) } @@ -790,7 +790,7 @@ fn compare_values<'blk, 'tcx>(cx: Block<'blk, 'tcx>, let did = langcall(cx, None, format!("comparison of `{}`", - cx.ty_to_string(rhs_t))[], + cx.ty_to_string(rhs_t)).index(&FullRange), StrEqFnLangItem); callee::trans_lang_call(cx, did, &[lhs, rhs], None) } @@ -945,7 +945,7 @@ fn compile_submatch<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>, if has_nested_bindings(m, col) { let expanded = expand_nested_bindings(bcx, m, col, val); compile_submatch_continue(bcx, - expanded[], + expanded.index(&FullRange), vals, chk, col, @@ -967,7 +967,7 @@ fn compile_submatch<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>, bcx = compile_guard(bcx, &**guard_expr, m[0].data, - m[1..m.len()], + m.index(&(1..m.len())), vals, chk, has_genuine_default); @@ -990,8 +990,8 @@ fn compile_submatch_continue<'a, 'p, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, let tcx = bcx.tcx(); let dm = &tcx.def_map; - let mut vals_left = vals[0u..col].to_vec(); - vals_left.push_all(vals[col + 1u..]); + let mut vals_left = vals.index(&(0u..col)).to_vec(); + vals_left.push_all(vals.index(&((col + 1u)..))); let ccx = bcx.fcx.ccx; // Find a real id (we're adding placeholder wildcard patterns, but @@ -1037,8 +1037,8 @@ fn compile_submatch_continue<'a, 'p, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, field_vals.len()) ); let mut vals = field_vals; - vals.push_all(vals_left[]); - compile_submatch(bcx, pats[], vals[], chk, has_genuine_default); + vals.push_all(vals_left.as_slice()); + compile_submatch(bcx, pats.as_slice(), vals.as_slice(), chk, has_genuine_default); return; } _ => () @@ -1046,7 +1046,7 @@ fn compile_submatch_continue<'a, 'p, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, // Decide what kind of branch we need let opts = get_branches(bcx, m, col); - debug!("options={}", opts); + debug!("options={:?}", opts); let mut kind = NoBranch; let mut test_val = val; debug!("test_val={}", bcx.val_to_string(test_val)); @@ -1191,10 +1191,10 @@ fn compile_submatch_continue<'a, 'p, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, } let opt_ms = enter_opt(opt_cx, pat_id, dm, m, opt, col, size, val); let mut opt_vals = unpacked; - opt_vals.push_all(vals_left[]); + opt_vals.push_all(vals_left.index(&FullRange)); compile_submatch(opt_cx, - opt_ms[], - opt_vals[], + opt_ms.index(&FullRange), + opt_vals.index(&FullRange), branch_chk.as_ref().unwrap_or(chk), has_genuine_default); } @@ -1213,8 +1213,8 @@ fn compile_submatch_continue<'a, 'p, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, } _ => { compile_submatch(else_cx, - defaults[], - vals_left[], + defaults.index(&FullRange), + vals_left.index(&FullRange), chk, has_genuine_default); } @@ -1333,7 +1333,7 @@ fn create_bindings_map<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, pat: &ast::Pat, "__llmatch"); trmode = 
TrByCopy(alloca_no_lifetime(bcx, llvariable_ty, - bcx.ident(ident)[])); + bcx.ident(ident).index(&FullRange))); } ast::BindByValue(_) => { // in this case, the final type of the variable will be T, @@ -1341,13 +1341,13 @@ fn create_bindings_map<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, pat: &ast::Pat, // above llmatch = alloca_no_lifetime(bcx, llvariable_ty.ptr_to(), - bcx.ident(ident)[]); + bcx.ident(ident).index(&FullRange)); trmode = TrByMove; } ast::BindByRef(_) => { llmatch = alloca_no_lifetime(bcx, llvariable_ty, - bcx.ident(ident)[]); + bcx.ident(ident).index(&FullRange)); trmode = TrByRef; } }; @@ -1415,7 +1415,7 @@ fn trans_match_inner<'blk, 'tcx>(scope_cx: Block<'blk, 'tcx>, && arm.pats.last().unwrap().node == ast::PatWild(ast::PatWildSingle) }); - compile_submatch(bcx, matches[], &[discr_datum.val], &chk, has_default); + compile_submatch(bcx, matches.index(&FullRange), &[discr_datum.val], &chk, has_default); let mut arm_cxs = Vec::new(); for arm_data in arm_datas.iter() { @@ -1429,7 +1429,7 @@ fn trans_match_inner<'blk, 'tcx>(scope_cx: Block<'blk, 'tcx>, arm_cxs.push(bcx); } - bcx = scope_cx.fcx.join_blocks(match_id, arm_cxs[]); + bcx = scope_cx.fcx.join_blocks(match_id, arm_cxs.index(&FullRange)); return bcx; } @@ -1582,7 +1582,7 @@ fn mk_binding_alloca<'blk, 'tcx, A, F>(bcx: Block<'blk, 'tcx>, let var_ty = node_id_type(bcx, p_id); // Allocate memory on stack for the binding. - let llval = alloc_ty(bcx, var_ty, bcx.ident(*ident)[]); + let llval = alloc_ty(bcx, var_ty, bcx.ident(*ident).index(&FullRange)); // Subtle: be sure that we *populate* the memory *before* // we schedule the cleanup. @@ -1620,7 +1620,7 @@ fn bind_irrefutable_pat<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, if bcx.sess().asm_comments() { add_comment(bcx, format!("bind_irrefutable_pat(pat={})", - pat.repr(bcx.tcx()))[]); + pat.repr(bcx.tcx())).index(&FullRange)); } let _indenter = indenter(); diff --git a/src/librustc_trans/trans/adt.rs b/src/librustc_trans/trans/adt.rs index 01b47b728b6ba..231de71848a1d 100644 --- a/src/librustc_trans/trans/adt.rs +++ b/src/librustc_trans/trans/adt.rs @@ -145,7 +145,7 @@ pub fn represent_type<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, } let repr = Rc::new(represent_type_uncached(cx, t)); - debug!("Represented as: {}", repr); + debug!("Represented as: {:?}", repr); cx.adt_reprs().borrow_mut().insert(t, repr.clone()); repr } @@ -154,7 +154,7 @@ fn represent_type_uncached<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> Repr<'tcx> { match t.sty { ty::ty_tup(ref elems) => { - Univariant(mk_struct(cx, elems[], false, t), false) + Univariant(mk_struct(cx, elems.index(&FullRange), false, t), false) } ty::ty_struct(def_id, substs) => { let fields = ty::lookup_struct_fields(cx.tcx(), def_id); @@ -165,17 +165,17 @@ fn represent_type_uncached<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, let dtor = ty::ty_dtor(cx.tcx(), def_id).has_drop_flag(); if dtor { ftys.push(cx.tcx().types.bool); } - Univariant(mk_struct(cx, ftys[], packed, t), dtor) + Univariant(mk_struct(cx, ftys.index(&FullRange), packed, t), dtor) } ty::ty_unboxed_closure(def_id, _, substs) => { let typer = NormalizingUnboxedClosureTyper::new(cx.tcx()); let upvars = typer.unboxed_closure_upvars(def_id, substs).unwrap(); let upvar_types = upvars.iter().map(|u| u.ty).collect::>(); - Univariant(mk_struct(cx, upvar_types[], false, t), false) + Univariant(mk_struct(cx, upvar_types.index(&FullRange), false, t), false) } ty::ty_enum(def_id, substs) => { let cases = get_cases(cx.tcx(), def_id, substs); - let hint = *ty::lookup_repr_hints(cx.tcx(), 
def_id)[].get(0) + let hint = *ty::lookup_repr_hints(cx.tcx(), def_id).index(&FullRange).get(0) .unwrap_or(&attr::ReprAny); let dtor = ty::ty_dtor(cx.tcx(), def_id).has_drop_flag(); @@ -185,7 +185,7 @@ fn represent_type_uncached<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, // (Typechecking will reject discriminant-sizing attrs.) assert_eq!(hint, attr::ReprAny); let ftys = if dtor { vec!(cx.tcx().types.bool) } else { vec!() }; - return Univariant(mk_struct(cx, ftys[], false, t), + return Univariant(mk_struct(cx, ftys.index(&FullRange), false, t), dtor); } @@ -208,7 +208,7 @@ fn represent_type_uncached<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, cx.sess().bug(format!("non-C-like enum {} with specified \ discriminants", ty::item_path_str(cx.tcx(), - def_id))[]); + def_id)).index(&FullRange)); } if cases.len() == 1 { @@ -217,7 +217,7 @@ fn represent_type_uncached<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, assert_eq!(hint, attr::ReprAny); let mut ftys = cases[0].tys.clone(); if dtor { ftys.push(cx.tcx().types.bool); } - return Univariant(mk_struct(cx, ftys[], false, t), + return Univariant(mk_struct(cx, ftys.index(&FullRange), false, t), dtor); } @@ -226,7 +226,7 @@ fn represent_type_uncached<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, let mut discr = 0; while discr < 2 { if cases[1 - discr].is_zerolen(cx, t) { - let st = mk_struct(cx, cases[discr].tys[], + let st = mk_struct(cx, cases[discr].tys.index(&FullRange), false, t); match cases[discr].find_ptr(cx) { Some(ref df) if df.len() == 1 && st.fields.len() == 1 => { @@ -316,17 +316,17 @@ fn represent_type_uncached<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, let fields : Vec<_> = cases.iter().map(|c| { let mut ftys = vec!(ty_of_inttype(cx.tcx(), ity)); - ftys.push_all(c.tys[]); + ftys.push_all(c.tys.index(&FullRange)); if dtor { ftys.push(cx.tcx().types.bool); } - mk_struct(cx, ftys[], false, t) + mk_struct(cx, ftys.index(&FullRange), false, t) }).collect(); - ensure_enum_fits_in_address_space(cx, ity, fields[], t); + ensure_enum_fits_in_address_space(cx, ity, fields.index(&FullRange), t); General(ity, fields, dtor) } _ => cx.sess().bug(format!("adt::represent_type called on non-ADT type: {}", - ty_to_string(cx.tcx(), t))[]) + ty_to_string(cx.tcx(), t)).index(&FullRange)) } } @@ -412,7 +412,7 @@ fn find_discr_field_candidate<'tcx>(tcx: &ty::ctxt<'tcx>, impl<'tcx> Case<'tcx> { fn is_zerolen<'a>(&self, cx: &CrateContext<'a, 'tcx>, scapegoat: Ty<'tcx>) -> bool { - mk_struct(cx, self.tys[], false, scapegoat).size == 0 + mk_struct(cx, self.tys.index(&FullRange), false, scapegoat).size == 0 } fn find_ptr<'a>(&self, cx: &CrateContext<'a, 'tcx>) -> Option { @@ -451,9 +451,9 @@ fn mk_struct<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, .map(|&ty| type_of::sizing_type_of(cx, ty)).collect() }; - ensure_struct_fits_in_address_space(cx, lltys[], packed, scapegoat); + ensure_struct_fits_in_address_space(cx, lltys.index(&FullRange), packed, scapegoat); - let llty_rec = Type::struct_(cx, lltys[], packed); + let llty_rec = Type::struct_(cx, lltys.index(&FullRange), packed); Struct { size: machine::llsize_of_alloc(cx, llty_rec), align: machine::llalign_of_min(cx, llty_rec), @@ -482,7 +482,7 @@ fn mk_cenum<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, } fn range_to_inttype(cx: &CrateContext, hint: Hint, bounds: &IntBounds) -> IntType { - debug!("range_to_inttype: {} {}", hint, bounds); + debug!("range_to_inttype: {:?} {:?}", hint, bounds); // Lists of sizes to try. u64 is always allowed as a fallback. 
#[allow(non_upper_case_globals)] static choose_shortest: &'static[IntType] = &[ @@ -502,7 +502,7 @@ fn range_to_inttype(cx: &CrateContext, hint: Hint, bounds: &IntBounds) -> IntTyp return ity; } attr::ReprExtern => { - attempts = match cx.sess().target.target.arch[] { + attempts = match cx.sess().target.target.arch.index(&FullRange) { // WARNING: the ARM EABI has two variants; the one corresponding to `at_least_32` // appears to be used on Linux and NetBSD, but some systems may use the variant // corresponding to `choose_shortest`. However, we don't run on those yet...? @@ -533,7 +533,7 @@ pub fn ll_inttype(cx: &CrateContext, ity: IntType) -> Type { } fn bounds_usable(cx: &CrateContext, ity: IntType, bounds: &IntBounds) -> bool { - debug!("bounds_usable: {} {}", ity, bounds); + debug!("bounds_usable: {:?} {:?}", ity, bounds); match ity { attr::SignedInt(_) => { let lllo = C_integral(ll_inttype(cx, ity), bounds.slo as u64, true); @@ -628,7 +628,7 @@ pub fn finish_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, match *r { CEnum(..) | General(..) | RawNullablePointer { .. } => { } Univariant(ref st, _) | StructWrappedNullablePointer { nonnull: ref st, .. } => - llty.set_struct_body(struct_llfields(cx, st, false, false)[], + llty.set_struct_body(struct_llfields(cx, st, false, false).index(&FullRange), st.packed) } } @@ -644,7 +644,7 @@ fn generic_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, Univariant(ref st, _) | StructWrappedNullablePointer { nonnull: ref st, .. } => { match name { None => { - Type::struct_(cx, struct_llfields(cx, st, sizing, dst)[], + Type::struct_(cx, struct_llfields(cx, st, sizing, dst).index(&FullRange), st.packed) } Some(name) => { assert_eq!(sizing, false); Type::named_struct(cx, name) } @@ -663,7 +663,7 @@ fn generic_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, // of the size. // // FIXME #10604: this breaks when vector types are present. - let (size, align) = union_size_and_align(sts[]); + let (size, align) = union_size_and_align(sts.index(&FullRange)); let align_s = align as u64; let discr_ty = ll_inttype(cx, ity); let discr_size = machine::llsize_of_alloc(cx, discr_ty); @@ -684,10 +684,10 @@ fn generic_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, Type::array(&discr_ty, align_s / discr_size - 1), fill_ty]; match name { - None => Type::struct_(cx, fields[], false), + None => Type::struct_(cx, fields.index(&FullRange), false), Some(name) => { let mut llty = Type::named_struct(cx, name); - llty.set_struct_body(fields[], false); + llty.set_struct_body(fields.index(&FullRange), false); llty } } @@ -731,7 +731,7 @@ pub fn trans_get_discr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, r: &Repr<'tcx>, -> ValueRef { let signed; let val; - debug!("trans_get_discr r: {}", r); + debug!("trans_get_discr r: {:?}", r); match *r { CEnum(ity, min, max) => { val = load_discr(bcx, ity, scrutinee, min, max); @@ -765,7 +765,7 @@ pub fn trans_get_discr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, r: &Repr<'tcx>, fn struct_wrapped_nullable_bitdiscr(bcx: Block, nndiscr: Disr, discrfield: &DiscrField, scrutinee: ValueRef) -> ValueRef { - let llptrptr = GEPi(bcx, scrutinee, discrfield[]); + let llptrptr = GEPi(bcx, scrutinee, discrfield.index(&FullRange)); let llptr = Load(bcx, llptrptr); let cmp = if nndiscr == 0 { IntEQ } else { IntNE }; ICmp(bcx, cmp, llptr, C_null(val_ty(llptr))) @@ -853,7 +853,7 @@ pub fn trans_set_discr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, r: &Repr<'tcx>, } StructWrappedNullablePointer { nndiscr, ref discrfield, .. 
} => { if discr != nndiscr { - let llptrptr = GEPi(bcx, val, discrfield[]); + let llptrptr = GEPi(bcx, val, discrfield.index(&FullRange)); let llptrty = val_ty(llptrptr).element_type(); Store(bcx, C_null(llptrty), llptrptr) } @@ -935,7 +935,7 @@ pub fn struct_field_ptr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, st: &Struct<'tcx>, v let val = if needs_cast { let ccx = bcx.ccx(); let fields = st.fields.iter().map(|&ty| type_of::type_of(ccx, ty)).collect::>(); - let real_ty = Type::struct_(ccx, fields[], st.packed); + let real_ty = Type::struct_(ccx, fields.index(&FullRange), st.packed); PointerCast(bcx, val, real_ty.ptr_to()) } else { val @@ -967,14 +967,14 @@ pub fn fold_variants<'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>, for (discr, case) in cases.iter().enumerate() { let mut variant_cx = fcx.new_temp_block( - format!("enum-variant-iter-{}", discr.to_string())[] + format!("enum-variant-iter-{}", discr.to_string()).index(&FullRange) ); let rhs_val = C_integral(ll_inttype(ccx, ity), discr as u64, true); AddCase(llswitch, rhs_val, variant_cx.llbb); let fields = case.fields.iter().map(|&ty| type_of::type_of(bcx.ccx(), ty)).collect::>(); - let real_ty = Type::struct_(ccx, fields[], case.packed); + let real_ty = Type::struct_(ccx, fields.index(&FullRange), case.packed); let variant_value = PointerCast(variant_cx, value, real_ty.ptr_to()); variant_cx = f(variant_cx, case, variant_value); @@ -1051,14 +1051,14 @@ pub fn trans_const<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, r: &Repr<'tcx>, discr let lldiscr = C_integral(ll_inttype(ccx, ity), discr as u64, true); let mut f = vec![lldiscr]; f.push_all(vals); - let mut contents = build_const_struct(ccx, case, f[]); + let mut contents = build_const_struct(ccx, case, f.index(&FullRange)); contents.push_all(&[padding(ccx, max_sz - case.size)]); - C_struct(ccx, contents[], false) + C_struct(ccx, contents.index(&FullRange), false) } Univariant(ref st, _dro) => { assert!(discr == 0); let contents = build_const_struct(ccx, st, vals); - C_struct(ccx, contents[], st.packed) + C_struct(ccx, contents.index(&FullRange), st.packed) } RawNullablePointer { nndiscr, nnty, .. 
} => { if discr == nndiscr { @@ -1072,7 +1072,7 @@ pub fn trans_const<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, r: &Repr<'tcx>, discr if discr == nndiscr { C_struct(ccx, build_const_struct(ccx, nonnull, - vals)[], + vals).index(&FullRange), false) } else { let vals = nonnull.fields.iter().map(|&ty| { @@ -1082,7 +1082,7 @@ pub fn trans_const<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, r: &Repr<'tcx>, discr }).collect::>(); C_struct(ccx, build_const_struct(ccx, nonnull, - vals[])[], + vals.index(&FullRange)).index(&FullRange), false) } } diff --git a/src/librustc_trans/trans/asm.rs b/src/librustc_trans/trans/asm.rs index f18d483f70328..890f046be1b2e 100644 --- a/src/librustc_trans/trans/asm.rs +++ b/src/librustc_trans/trans/asm.rs @@ -71,7 +71,7 @@ pub fn trans_inline_asm<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, ia: &ast::InlineAsm) callee::DontAutorefArg) }) }).collect::>(); - inputs.push_all(ext_inputs[]); + inputs.push_all(ext_inputs.index(&FullRange)); // no failure occurred preparing operands, no need to cleanup fcx.pop_custom_cleanup_scope(temp_scope); @@ -91,18 +91,18 @@ pub fn trans_inline_asm<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, ia: &ast::InlineAsm) if !clobbers.is_empty() { clobbers.push(','); } - clobbers.push_str(more_clobbers[]); + clobbers.push_str(more_clobbers.index(&FullRange)); } // Add the clobbers to our constraints list if clobbers.len() != 0 && constraints.len() != 0 { constraints.push(','); - constraints.push_str(clobbers[]); + constraints.push_str(clobbers.index(&FullRange)); } else { - constraints.push_str(clobbers[]); + constraints.push_str(clobbers.index(&FullRange)); } - debug!("Asm Constraints: {}", constraints[]); + debug!("Asm Constraints: {}", constraints.index(&FullRange)); let num_outputs = outputs.len(); @@ -112,7 +112,7 @@ pub fn trans_inline_asm<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, ia: &ast::InlineAsm) } else if num_outputs == 1 { output_types[0] } else { - Type::struct_(bcx.ccx(), output_types[], false) + Type::struct_(bcx.ccx(), output_types.index(&FullRange), false) }; let dialect = match ia.dialect { diff --git a/src/librustc_trans/trans/base.rs b/src/librustc_trans/trans/base.rs index edcfaae0f802d..057d0f378e6f4 100644 --- a/src/librustc_trans/trans/base.rs +++ b/src/librustc_trans/trans/base.rs @@ -249,7 +249,7 @@ fn get_extern_rust_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, fn_ty: Ty<'tcx>, let f = decl_rust_fn(ccx, fn_ty, name); csearch::get_item_attrs(&ccx.sess().cstore, did, |attrs| { - set_llvm_fn_attrs(ccx, attrs[], f) + set_llvm_fn_attrs(ccx, attrs.index(&FullRange), f) }); ccx.externs().borrow_mut().insert(name.to_string(), f); @@ -283,35 +283,40 @@ pub fn decl_rust_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, fn_ty: Ty<'tcx>, name: &str) -> ValueRef { let fn_ty = monomorphize::normalize_associated_type(ccx.tcx(), &fn_ty); - let (inputs, output, abi, env) = match fn_ty.sty { + let function_type; // placeholder so that the memory ownership works out ok + + let (sig, abi, env) = match fn_ty.sty { ty::ty_bare_fn(_, ref f) => { - (f.sig.0.inputs.clone(), f.sig.0.output, f.abi, None) + (&f.sig, f.abi, None) } ty::ty_unboxed_closure(closure_did, _, substs) => { let typer = common::NormalizingUnboxedClosureTyper::new(ccx.tcx()); - let function_type = typer.unboxed_closure_type(closure_did, substs); + function_type = typer.unboxed_closure_type(closure_did, substs); let self_type = self_type_for_unboxed_closure(ccx, closure_did, fn_ty); let llenvironment_type = type_of_explicit_arg(ccx, self_type); debug!("decl_rust_fn: function_type={} self_type={}", 
function_type.repr(ccx.tcx()), self_type.repr(ccx.tcx())); - (function_type.sig.0.inputs, - function_type.sig.0.output, - RustCall, - Some(llenvironment_type)) + (&function_type.sig, RustCall, Some(llenvironment_type)) } _ => panic!("expected closure or fn") }; - let llfty = type_of_rust_fn(ccx, env, inputs[], output, abi); - debug!("decl_rust_fn(input count={},type={})", - inputs.len(), + let sig = ty::erase_late_bound_regions(ccx.tcx(), sig); + let sig = ty::Binder(sig); + + let llfty = type_of_rust_fn(ccx, env, &sig, abi); + + debug!("decl_rust_fn(sig={}, type={})", + sig.repr(ccx.tcx()), ccx.tn().type_to_string(llfty)); - let llfn = decl_fn(ccx, name, llvm::CCallConv, llfty, output); + let llfn = decl_fn(ccx, name, llvm::CCallConv, llfty, sig.0.output /* (1) */); let attrs = get_fn_llvm_attributes(ccx, fn_ty); attrs.apply_llfn(llfn); + // (1) it's ok to directly access sig.0.output because we erased all late-bound-regions above + llfn } @@ -369,7 +374,7 @@ fn require_alloc_fn<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, Err(s) => { bcx.sess().fatal(format!("allocation of `{}` {}", bcx.ty_to_string(info_ty), - s)[]); + s).index(&FullRange)); } } } @@ -488,7 +493,7 @@ pub fn unset_split_stack(f: ValueRef) { // silently mangles such symbols, breaking our linkage model. pub fn note_unique_llvm_symbol(ccx: &CrateContext, sym: String) { if ccx.all_llvm_symbols().borrow().contains(&sym) { - ccx.sess().bug(format!("duplicate LLVM symbol: {}", sym)[]); + ccx.sess().bug(format!("duplicate LLVM symbol: {}", sym).index(&FullRange)); } ccx.all_llvm_symbols().borrow_mut().insert(sym); } @@ -525,7 +530,7 @@ pub fn get_res_dtor<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, ty::mk_nil(ccx.tcx())); get_extern_fn(ccx, &mut *ccx.externs().borrow_mut(), - name[], + name.index(&FullRange), llvm::CCallConv, llty, dtor_ty) @@ -774,8 +779,8 @@ pub fn iter_structural_ty<'blk, 'tcx, F>(cx: Block<'blk, 'tcx>, let variant_cx = fcx.new_temp_block( format!("enum-iter-variant-{}", - variant.disr_val.to_string()[]) - []); + variant.disr_val.to_string().index(&FullRange)) + .index(&FullRange)); match adt::trans_case(cx, &*repr, variant.disr_val) { _match::SingleResult(r) => { AddCase(llswitch, r.val, variant_cx.llbb) @@ -800,7 +805,7 @@ pub fn iter_structural_ty<'blk, 'tcx, F>(cx: Block<'blk, 'tcx>, } _ => { cx.sess().unimpl(format!("type in iter_structural_ty: {}", - ty_to_string(cx.tcx(), t))[]) + ty_to_string(cx.tcx(), t)).index(&FullRange)) } } return cx; @@ -882,7 +887,7 @@ pub fn fail_if_zero_or_overflows<'blk, 'tcx>( } _ => { cx.sess().bug(format!("fail-if-zero on unexpected type: {}", - ty_to_string(cx.tcx(), rhs_t))[]); + ty_to_string(cx.tcx(), rhs_t)).index(&FullRange)); } }; let bcx = with_cond(cx, is_zero, |bcx| { @@ -903,8 +908,8 @@ pub fn fail_if_zero_or_overflows<'blk, 'tcx>( ty::ty_int(t) => { let llty = Type::int_from_ty(cx.ccx(), t); let min = match t { - ast::TyI if llty == Type::i32(cx.ccx()) => i32::MIN as u64, - ast::TyI => i64::MIN as u64, + ast::TyIs if llty == Type::i32(cx.ccx()) => i32::MIN as u64, + ast::TyIs => i64::MIN as u64, ast::TyI8 => i8::MIN as u64, ast::TyI16 => i16::MIN as u64, ast::TyI32 => i32::MIN as u64, @@ -936,14 +941,14 @@ pub fn trans_external_path<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, ty::ty_bare_fn(_, ref fn_ty) => { match ccx.sess().target.target.adjust_abi(fn_ty.abi) { Rust | RustCall => { - get_extern_rust_fn(ccx, t, name[], did) + get_extern_rust_fn(ccx, t, name.index(&FullRange), did) } RustIntrinsic => { ccx.sess().bug("unexpected intrinsic in trans_external_path") } _ => { 
foreign::register_foreign_item_fn(ccx, fn_ty.abi, t, - name[]) + name.index(&FullRange)) } } } @@ -976,7 +981,7 @@ pub fn invoke<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, } if need_invoke(bcx) { - debug!("invoking {} at {}", bcx.val_to_string(llfn), bcx.llbb); + debug!("invoking {} at {:?}", bcx.val_to_string(llfn), bcx.llbb); for &llarg in llargs.iter() { debug!("arg: {}", bcx.val_to_string(llarg)); } @@ -990,13 +995,13 @@ pub fn invoke<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let llresult = Invoke(bcx, llfn, - llargs[], + llargs.index(&FullRange), normal_bcx.llbb, landing_pad, Some(attributes)); return (llresult, normal_bcx); } else { - debug!("calling {} at {}", bcx.val_to_string(llfn), bcx.llbb); + debug!("calling {} at {:?}", bcx.val_to_string(llfn), bcx.llbb); for &llarg in llargs.iter() { debug!("arg: {}", bcx.val_to_string(llarg)); } @@ -1006,7 +1011,7 @@ pub fn invoke<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, None => debuginfo::clear_source_location(bcx.fcx) }; - let llresult = Call(bcx, llfn, llargs[], Some(attributes)); + let llresult = Call(bcx, llfn, llargs.index(&FullRange), Some(attributes)); return (llresult, bcx); } } @@ -1123,7 +1128,7 @@ pub fn call_lifetime_end(cx: Block, ptr: ValueRef) { pub fn call_memcpy(cx: Block, dst: ValueRef, src: ValueRef, n_bytes: ValueRef, align: u32) { let _icx = push_ctxt("call_memcpy"); let ccx = cx.ccx(); - let key = match ccx.sess().target.target.target_word_size[] { + let key = match ccx.sess().target.target.target_word_size.index(&FullRange) { "32" => "llvm.memcpy.p0i8.p0i8.i32", "64" => "llvm.memcpy.p0i8.p0i8.i64", tws => panic!("Unsupported target word size for memcpy: {}", tws), @@ -1170,7 +1175,7 @@ fn memzero<'a, 'tcx>(b: &Builder<'a, 'tcx>, llptr: ValueRef, ty: Ty<'tcx>) { let llty = type_of::type_of(ccx, ty); - let intrinsic_key = match ccx.sess().target.target.target_word_size[] { + let intrinsic_key = match ccx.sess().target.target.target_word_size.index(&FullRange) { "32" => "llvm.memset.p0i8.i32", "64" => "llvm.memset.p0i8.i64", tws => panic!("Unsupported target word size for memset: {}", tws), @@ -1658,7 +1663,7 @@ fn copy_unboxed_closure_args_to_allocas<'blk, 'tcx>( "argtuple", arg_scope_id)); let untupled_arg_types = match monomorphized_arg_types[0].sty { - ty::ty_tup(ref types) => types[], + ty::ty_tup(ref types) => types.index(&FullRange), _ => { bcx.tcx().sess.span_bug(args[0].pat.span, "first arg to `rust-call` ABI function \ @@ -1846,12 +1851,12 @@ pub fn trans_closure<'a, 'b, 'tcx>(ccx: &CrateContext<'a, 'tcx>, let arg_datums = if abi != RustCall { create_datums_for_fn_args(&fcx, - monomorphized_arg_types[]) + monomorphized_arg_types.index(&FullRange)) } else { create_datums_for_fn_args_under_call_abi( bcx, arg_scope, - monomorphized_arg_types[]) + monomorphized_arg_types.index(&FullRange)) }; bcx = match closure_env.kind { @@ -1859,16 +1864,16 @@ pub fn trans_closure<'a, 'b, 'tcx>(ccx: &CrateContext<'a, 'tcx>, copy_args_to_allocas(&fcx, arg_scope, bcx, - decl.inputs[], + decl.inputs.index(&FullRange), arg_datums) } closure::UnboxedClosure(..) 
=> { copy_unboxed_closure_args_to_allocas( bcx, arg_scope, - decl.inputs[], + decl.inputs.index(&FullRange), arg_datums, - monomorphized_arg_types[]) + monomorphized_arg_types.index(&FullRange)) } }; @@ -1938,7 +1943,7 @@ pub fn trans_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, debug!("trans_fn(param_substs={})", param_substs.repr(ccx.tcx())); let _icx = push_ctxt("trans_fn"); let fn_ty = ty::node_id_to_type(ccx.tcx(), id); - let output_type = ty::ty_fn_ret(fn_ty); + let output_type = ty::erase_late_bound_regions(ccx.tcx(), &ty::ty_fn_ret(fn_ty)); let abi = ty::ty_fn_abi(fn_ty); trans_closure(ccx, decl, @@ -1981,11 +1986,13 @@ pub fn trans_named_tuple_constructor<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, let tcx = ccx.tcx(); let result_ty = match ctor_ty.sty { - ty::ty_bare_fn(_, ref bft) => bft.sig.0.output.unwrap(), + ty::ty_bare_fn(_, ref bft) => { + ty::erase_late_bound_regions(bcx.tcx(), &bft.sig.output()).unwrap() + } _ => ccx.sess().bug( format!("trans_enum_variant_constructor: \ unexpected ctor return type {}", - ctor_ty.repr(tcx))[]) + ctor_ty.repr(tcx)).index(&FullRange)) }; // Get location to store the result. If the user does not care about @@ -2008,7 +2015,7 @@ pub fn trans_named_tuple_constructor<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, bcx = expr::trans_adt(bcx, result_ty, disr, - fields[], + fields.index(&FullRange), None, expr::SaveIn(llresult), call_info); @@ -2053,11 +2060,13 @@ fn trans_enum_variant_or_tuple_like_struct<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx let ctor_ty = monomorphize::apply_param_substs(ccx.tcx(), param_substs, &ctor_ty); let result_ty = match ctor_ty.sty { - ty::ty_bare_fn(_, ref bft) => bft.sig.0.output, + ty::ty_bare_fn(_, ref bft) => { + ty::erase_late_bound_regions(ccx.tcx(), &bft.sig.output()) + } _ => ccx.sess().bug( format!("trans_enum_variant_or_tuple_like_struct: \ unexpected ctor return type {}", - ty_to_string(ccx.tcx(), ctor_ty))[]) + ty_to_string(ccx.tcx(), ctor_ty)).index(&FullRange)) }; let arena = TypedArena::new(); @@ -2067,9 +2076,11 @@ fn trans_enum_variant_or_tuple_like_struct<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx assert!(!fcx.needs_ret_allocas); - let arg_tys = ty::ty_fn_args(ctor_ty); + let arg_tys = + ty::erase_late_bound_regions( + ccx.tcx(), &ty::ty_fn_args(ctor_ty)); - let arg_datums = create_datums_for_fn_args(&fcx, arg_tys[]); + let arg_datums = create_datums_for_fn_args(&fcx, arg_tys.index(&FullRange)); if !type_is_zero_size(fcx.ccx, result_ty.unwrap()) { let dest = fcx.get_ret_slot(bcx, result_ty, "eret_slot"); @@ -2155,7 +2166,7 @@ fn enum_variant_size_lint(ccx: &CrateContext, enum_def: &ast::EnumDef, sp: Span, *lvlsrc.unwrap(), Some(sp), format!("enum variant is more than three times larger \ ({} bytes) than the next largest (ignoring padding)", - largest)[]); + largest).index(&FullRange)); ccx.sess().span_note(enum_def.variants[largest_index].span, "this variant is the largest"); @@ -2239,7 +2250,7 @@ pub fn update_linkage(ccx: &CrateContext, if let Some(id) = id { let item = ccx.tcx().map.get(id); if let ast_map::NodeItem(i) = item { - if let Some(name) = attr::first_attr_value_str_by_name(i.attrs[], "linkage") { + if let Some(name) = attr::first_attr_value_str_by_name(i.attrs.as_slice(), "linkage") { if let Some(linkage) = llvm_linkage_by_name(name.get()) { llvm::SetLinkage(llval, linkage); } else { @@ -2273,7 +2284,7 @@ pub fn trans_item(ccx: &CrateContext, item: &ast::Item) { match item.node { ast::ItemFn(ref decl, _fn_style, abi, ref generics, ref body) => { if !generics.is_type_parameterized() { - let trans_everywhere 
= attr::requests_inline(item.attrs[]); + let trans_everywhere = attr::requests_inline(item.attrs.index(&FullRange)); // Ignore `trans_everywhere` for cross-crate inlined items // (`from_external`). `trans_item` will be called once for each // compilation unit that references the item, so it will still get @@ -2284,7 +2295,7 @@ pub fn trans_item(ccx: &CrateContext, item: &ast::Item) { foreign::trans_rust_fn_with_foreign_abi(ccx, &**decl, &**body, - item.attrs[], + item.attrs.index(&FullRange), llfn, &Substs::trans_empty(), item.id, @@ -2296,7 +2307,7 @@ pub fn trans_item(ccx: &CrateContext, item: &ast::Item) { llfn, &Substs::trans_empty(), item.id, - item.attrs[]); + item.attrs.index(&FullRange)); } update_linkage(ccx, llfn, @@ -2313,7 +2324,7 @@ pub fn trans_item(ccx: &CrateContext, item: &ast::Item) { ast::ItemImpl(_, _, ref generics, _, _, ref impl_items) => { meth::trans_impl(ccx, item.ident, - impl_items[], + impl_items.index(&FullRange), generics, item.id); } @@ -2343,7 +2354,7 @@ pub fn trans_item(ccx: &CrateContext, item: &ast::Item) { // Do static_assert checking. It can't really be done much earlier // because we need to get the value of the bool out of LLVM - if attr::contains_name(item.attrs[], "static_assert") { + if attr::contains_name(item.attrs.index(&FullRange), "static_assert") { if m == ast::MutMutable { ccx.sess().span_fatal(expr.span, "cannot have static_assert on a mutable \ @@ -2420,31 +2431,34 @@ fn register_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, _ => panic!("expected bare rust fn") }; - let llfn = decl_rust_fn(ccx, node_type, sym[]); + let llfn = decl_rust_fn(ccx, node_type, sym.index(&FullRange)); finish_register_fn(ccx, sp, sym, node_id, llfn); llfn } pub fn get_fn_llvm_attributes<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, fn_ty: Ty<'tcx>) - -> llvm::AttrBuilder { + -> llvm::AttrBuilder +{ use middle::ty::{BrAnon, ReLateBound}; + let function_type; let (fn_sig, abi, has_env) = match fn_ty.sty { - ty::ty_bare_fn(_, ref f) => (f.sig.clone(), f.abi, false), + ty::ty_bare_fn(_, ref f) => (&f.sig, f.abi, false), ty::ty_unboxed_closure(closure_did, _, substs) => { let typer = common::NormalizingUnboxedClosureTyper::new(ccx.tcx()); - let function_type = typer.unboxed_closure_type(closure_did, substs); - (function_type.sig, RustCall, true) + function_type = typer.unboxed_closure_type(closure_did, substs); + (&function_type.sig, RustCall, true) } _ => ccx.sess().bug("expected closure or function.") }; + let fn_sig = ty::erase_late_bound_regions(ccx.tcx(), fn_sig); // Since index 0 is the return value of the llvm func, we start // at either 1 or 2 depending on whether there's an env slot or not let mut first_arg_offset = if has_env { 2 } else { 1 }; let mut attrs = llvm::AttrBuilder::new(); - let ret_ty = fn_sig.0.output; + let ret_ty = fn_sig.output; // These have an odd calling convention, so we need to manually // unpack the input ty's @@ -2452,23 +2466,23 @@ pub fn get_fn_llvm_attributes<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, fn_ty: Ty< ty::ty_unboxed_closure(_, _, _) => { assert!(abi == RustCall); - match fn_sig.0.inputs[0].sty { + match fn_sig.inputs[0].sty { ty::ty_tup(ref inputs) => inputs.clone(), _ => ccx.sess().bug("expected tuple'd inputs") } }, ty::ty_bare_fn(..) 
if abi == RustCall => { - let mut inputs = vec![fn_sig.0.inputs[0]]; + let mut inputs = vec![fn_sig.inputs[0]]; - match fn_sig.0.inputs[1].sty { + match fn_sig.inputs[1].sty { ty::ty_tup(ref t_in) => { - inputs.push_all(t_in[]); + inputs.push_all(t_in.index(&FullRange)); inputs } _ => ccx.sess().bug("expected tuple'd inputs") } } - _ => fn_sig.0.inputs.clone() + _ => fn_sig.inputs.clone() }; if let ty::FnConverging(ret_ty) = ret_ty { @@ -2597,7 +2611,11 @@ pub fn register_fn_llvmty(ccx: &CrateContext, llfty: Type) -> ValueRef { debug!("register_fn_llvmty id={} sym={}", node_id, sym); - let llfn = decl_fn(ccx, sym[], cc, llfty, ty::FnConverging(ty::mk_nil(ccx.tcx()))); + let llfn = decl_fn(ccx, + sym.index(&FullRange), + cc, + llfty, + ty::FnConverging(ty::mk_nil(ccx.tcx()))); finish_register_fn(ccx, sp, sym, node_id, llfn); llfn } @@ -2650,7 +2668,7 @@ pub fn create_entry_wrapper(ccx: &CrateContext, let (start_fn, args) = if use_start_lang_item { let start_def_id = match ccx.tcx().lang_items.require(StartFnLangItem) { Ok(id) => id, - Err(s) => { ccx.sess().fatal(s[]); } + Err(s) => { ccx.sess().fatal(s.index(&FullRange)); } }; let start_fn = if start_def_id.krate == ast::LOCAL_CRATE { get_item_val(ccx, start_def_id.node) @@ -2738,11 +2756,11 @@ pub fn get_item_val(ccx: &CrateContext, id: ast::NodeId) -> ValueRef { } let item = ccx.tcx().map.get(id); - debug!("get_item_val: id={} item={}", id, item); + debug!("get_item_val: id={} item={:?}", id, item); let val = match item { ast_map::NodeItem(i) => { let ty = ty::node_id_to_type(ccx.tcx(), i.id); - let sym = |&:| exported_name(ccx, id, ty, i.attrs[]); + let sym = |&:| exported_name(ccx, id, ty, i.attrs.index(&FullRange)); let v = match i.node { ast::ItemStatic(_, _, ref expr) => { @@ -2765,16 +2783,16 @@ pub fn get_item_val(ccx: &CrateContext, id: ast::NodeId) -> ValueRef { } else { llvm::LLVMTypeOf(v) }; - if contains_null(sym[]) { + if contains_null(sym.index(&FullRange)) { ccx.sess().fatal( format!("Illegal null byte in export_name \ - value: `{}`", sym)[]); + value: `{}`", sym).index(&FullRange)); } let buf = CString::from_slice(sym.as_bytes()); let g = llvm::LLVMAddGlobal(ccx.llmod(), llty, buf.as_ptr()); - if attr::contains_name(i.attrs[], + if attr::contains_name(i.attrs.index(&FullRange), "thread_local") { llvm::set_thread_local(g, true); } @@ -2799,19 +2817,19 @@ pub fn get_item_val(ccx: &CrateContext, id: ast::NodeId) -> ValueRef { sym, i.id) }; - set_llvm_fn_attrs(ccx, i.attrs[], llfn); + set_llvm_fn_attrs(ccx, i.attrs.index(&FullRange), llfn); llfn } _ => panic!("get_item_val: weird result in table") }; - match attr::first_attr_value_str_by_name(i.attrs[], + match attr::first_attr_value_str_by_name(i.attrs.index(&FullRange), "link_section") { Some(sect) => { if contains_null(sect.get()) { ccx.sess().fatal(format!("Illegal null byte in link_section value: `{}`", - sect.get())[]); + sect.get()).index(&FullRange)); } unsafe { let buf = CString::from_slice(sect.get().as_bytes()); @@ -2854,7 +2872,7 @@ pub fn get_item_val(ccx: &CrateContext, id: ast::NodeId) -> ValueRef { let abi = ccx.tcx().map.get_foreign_abi(id); let ty = ty::node_id_to_type(ccx.tcx(), ni.id); let name = foreign::link_name(&*ni); - foreign::register_foreign_item_fn(ccx, abi, ty, name.get()[]) + foreign::register_foreign_item_fn(ccx, abi, ty, name.get().index(&FullRange)) } ast::ForeignItemStatic(..) 
=> { foreign::register_static(ccx, &*ni) @@ -2877,7 +2895,7 @@ pub fn get_item_val(ccx: &CrateContext, id: ast::NodeId) -> ValueRef { let sym = exported_name(ccx, id, ty, - enm.attrs[]); + enm.attrs.index(&FullRange)); llfn = match enm.node { ast::ItemEnum(_, _) => { @@ -2905,7 +2923,7 @@ pub fn get_item_val(ccx: &CrateContext, id: ast::NodeId) -> ValueRef { id, ty, struct_item.attrs - []); + .index(&FullRange)); let llfn = register_fn(ccx, struct_item.span, sym, ctor_id, ty); set_inline_hint(llfn); @@ -2913,8 +2931,8 @@ pub fn get_item_val(ccx: &CrateContext, id: ast::NodeId) -> ValueRef { } ref variant => { - ccx.sess().bug(format!("get_item_val(): unexpected variant: {}", - variant)[]) + ccx.sess().bug(format!("get_item_val(): unexpected variant: {:?}", + variant).index(&FullRange)) } }; @@ -2935,10 +2953,10 @@ fn register_method(ccx: &CrateContext, id: ast::NodeId, m: &ast::Method) -> ValueRef { let mty = ty::node_id_to_type(ccx.tcx(), id); - let sym = exported_name(ccx, id, mty, m.attrs[]); + let sym = exported_name(ccx, id, mty, m.attrs.index(&FullRange)); let llfn = register_fn(ccx, m.span, sym, id, mty); - set_llvm_fn_attrs(ccx, m.attrs[], llfn); + set_llvm_fn_attrs(ccx, m.attrs.index(&FullRange), llfn); llfn } @@ -2977,7 +2995,7 @@ pub fn write_metadata(cx: &SharedCrateContext, krate: &ast::Crate) -> Vec { Some(compressed) => compressed, None => cx.sess().fatal("failed to compress metadata"), }.as_slice()); - let llmeta = C_bytes_in_context(cx.metadata_llcx(), compressed[]); + let llmeta = C_bytes_in_context(cx.metadata_llcx(), compressed.index(&FullRange)); let llconst = C_struct_in_context(cx.metadata_llcx(), &[llmeta], false); let name = format!("rust_metadata_{}_{}", cx.link_meta().crate_name, @@ -3106,7 +3124,7 @@ pub fn trans_crate<'tcx>(analysis: ty::CrateAnalysis<'tcx>) let link_meta = link::build_link_meta(&tcx.sess, krate, name); let codegen_units = tcx.sess.opts.cg.codegen_units; - let shared_ccx = SharedCrateContext::new(link_meta.crate_name[], + let shared_ccx = SharedCrateContext::new(link_meta.crate_name.index(&FullRange), codegen_units, tcx, export_map, @@ -3208,7 +3226,7 @@ pub fn trans_crate<'tcx>(analysis: ty::CrateAnalysis<'tcx>) llmod: shared_ccx.metadata_llmod(), }; let formats = shared_ccx.tcx().dependency_formats.borrow().clone(); - let no_builtins = attr::contains_name(krate.attrs[], "no_builtins"); + let no_builtins = attr::contains_name(krate.attrs.index(&FullRange), "no_builtins"); let translation = CrateTranslation { modules: modules, diff --git a/src/librustc_trans/trans/builder.rs b/src/librustc_trans/trans/builder.rs index e09d36ddae923..d0eaf799af1bd 100644 --- a/src/librustc_trans/trans/builder.rs +++ b/src/librustc_trans/trans/builder.rs @@ -552,11 +552,11 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { for (small_vec_e, &ix) in small_vec.iter_mut().zip(ixs.iter()) { *small_vec_e = C_i32(self.ccx, ix as i32); } - self.inbounds_gep(base, small_vec[..ixs.len()]) + self.inbounds_gep(base, small_vec.index(&(0..ixs.len()))) } else { let v = ixs.iter().map(|i| C_i32(self.ccx, *i as i32)).collect::>(); self.count_insn("gepi"); - self.inbounds_gep(base, v[]) + self.inbounds_gep(base, v.index(&FullRange)) } } @@ -764,8 +764,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { let s = format!("{} ({})", text, self.ccx.sess().codemap().span_to_string(sp)); - debug!("{}", s[]); - self.add_comment(s[]); + debug!("{}", s.index(&FullRange)); + self.add_comment(s.index(&FullRange)); } } @@ -802,7 +802,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { }).collect::>(); debug!("Asm Output Type: 
{}", self.ccx.tn().type_to_string(output)); - let fty = Type::func(argtys[], &output); + let fty = Type::func(argtys.index(&FullRange), &output); unsafe { let v = llvm::LLVMInlineAsm( fty.to_ref(), asm, cons, volatile, alignstack, dia as c_uint); diff --git a/src/librustc_trans/trans/cabi.rs b/src/librustc_trans/trans/cabi.rs index 0e38dd0e5b596..a901142467b8d 100644 --- a/src/librustc_trans/trans/cabi.rs +++ b/src/librustc_trans/trans/cabi.rs @@ -108,7 +108,7 @@ pub fn compute_abi_info(ccx: &CrateContext, atys: &[Type], rty: Type, ret_def: bool) -> FnType { - match ccx.sess().target.target.arch[] { + match ccx.sess().target.target.arch.index(&FullRange) { "x86" => cabi_x86::compute_abi_info(ccx, atys, rty, ret_def), "x86_64" => if ccx.sess().target.target.options.is_like_windows { cabi_x86_win64::compute_abi_info(ccx, atys, rty, ret_def) @@ -119,6 +119,6 @@ pub fn compute_abi_info(ccx: &CrateContext, "aarch64" => cabi_aarch64::compute_abi_info(ccx, atys, rty, ret_def), "mips" => cabi_mips::compute_abi_info(ccx, atys, rty, ret_def), a => ccx.sess().fatal((format!("unrecognized arch \"{}\" in target specification", a)) - []), + .index(&FullRange)), } } diff --git a/src/librustc_trans/trans/cabi_x86_64.rs b/src/librustc_trans/trans/cabi_x86_64.rs index 9ec0c822bf5fe..f40072d1cba3e 100644 --- a/src/librustc_trans/trans/cabi_x86_64.rs +++ b/src/librustc_trans/trans/cabi_x86_64.rs @@ -318,7 +318,7 @@ fn llreg_ty(ccx: &CrateContext, cls: &[RegClass]) -> Type { tys.push(Type::i64(ccx)); } SSEFv => { - let vec_len = llvec_len(cls[i + 1u..]); + let vec_len = llvec_len(cls.index(&((i + 1u)..))); let vec_ty = Type::vector(&Type::f32(ccx), (vec_len * 2u) as u64); tys.push(vec_ty); i += vec_len; diff --git a/src/librustc_trans/trans/callee.rs b/src/librustc_trans/trans/callee.rs index 65e6d7e1924b6..b7b486f1d0a52 100644 --- a/src/librustc_trans/trans/callee.rs +++ b/src/librustc_trans/trans/callee.rs @@ -114,7 +114,7 @@ fn trans<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, expr: &ast::Expr) expr.span, format!("type of callee is neither bare-fn nor closure: \ {}", - bcx.ty_to_string(datum.ty))[]); + bcx.ty_to_string(datum.ty)).index(&FullRange)); } } } @@ -206,8 +206,8 @@ fn trans<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, expr: &ast::Expr) def::DefSelfTy(..) | def::DefAssociatedPath(..) => { bcx.tcx().sess.span_bug( ref_expr.span, - format!("cannot translate def {} \ - to a callable thing!", def)[]); + format!("cannot translate def {:?} \ + to a callable thing!", def).index(&FullRange)); } } } @@ -223,7 +223,7 @@ pub fn trans_fn_ref<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, let _icx = push_ctxt("trans_fn_ref"); let substs = node_id_substs(ccx, node, param_substs); - debug!("trans_fn_ref(def_id={}, node={}, substs={})", + debug!("trans_fn_ref(def_id={}, node={:?}, substs={})", def_id.repr(ccx.tcx()), node, substs.repr(ccx.tcx())); @@ -265,7 +265,7 @@ pub fn trans_fn_pointer_shim<'a, 'tcx>( let _icx = push_ctxt("trans_fn_pointer_shim"); let tcx = ccx.tcx(); - let bare_fn_ty = ty::normalize_ty(tcx, bare_fn_ty); + let bare_fn_ty = normalize_ty(tcx, bare_fn_ty); match ccx.fn_pointer_shims().borrow().get(&bare_fn_ty) { Some(&llval) => { return llval; } None => { } @@ -279,24 +279,22 @@ pub fn trans_fn_pointer_shim<'a, 'tcx>( // Construct the "tuply" version of `bare_fn_ty`. It takes two arguments: `self`, // which is the fn pointer, and `args`, which is the arguments tuple. 
- let (opt_def_id, input_tys, output_ty) = + let (opt_def_id, sig) = match bare_fn_ty.sty { ty::ty_bare_fn(opt_def_id, &ty::BareFnTy { unsafety: ast::Unsafety::Normal, - abi: synabi::Rust, - sig: ty::Binder(ty::FnSig { inputs: ref input_tys, - output: output_ty, - variadic: false })}) => - { - (opt_def_id, input_tys, output_ty) + abi: synabi::Rust, + ref sig }) => { + (opt_def_id, sig) } _ => { tcx.sess.bug(format!("trans_fn_pointer_shim invoked on invalid type: {}", - bare_fn_ty.repr(tcx))[]); + bare_fn_ty.repr(tcx)).index(&FullRange)); } }; - let tuple_input_ty = ty::mk_tup(tcx, input_tys.to_vec()); + let sig = ty::erase_late_bound_regions(tcx, sig); + let tuple_input_ty = ty::mk_tup(tcx, sig.inputs.to_vec()); let tuple_fn_ty = ty::mk_bare_fn(tcx, opt_def_id, tcx.mk_bare_fn(ty::BareFnTy { @@ -305,7 +303,7 @@ pub fn trans_fn_pointer_shim<'a, 'tcx>( sig: ty::Binder(ty::FnSig { inputs: vec![bare_fn_ty_ref, tuple_input_ty], - output: output_ty, + output: sig.output, variadic: false })})); debug!("tuple_fn_ty: {}", tuple_fn_ty.repr(tcx)); @@ -317,7 +315,7 @@ pub fn trans_fn_pointer_shim<'a, 'tcx>( let llfn = decl_internal_rust_fn(ccx, tuple_fn_ty, - function_name[]); + function_name.index(&FullRange)); // let block_arena = TypedArena::new(); @@ -326,11 +324,11 @@ pub fn trans_fn_pointer_shim<'a, 'tcx>( llfn, ast::DUMMY_NODE_ID, false, - output_ty, + sig.output, &empty_substs, None, &block_arena); - let mut bcx = init_function(&fcx, false, output_ty); + let mut bcx = init_function(&fcx, false, sig.output); // the first argument (`self`) will be ptr to the the fn pointer let llfnpointer = @@ -338,24 +336,24 @@ pub fn trans_fn_pointer_shim<'a, 'tcx>( // the remaining arguments will be the untupled values let llargs: Vec<_> = - input_tys.iter() + sig.inputs.iter() .enumerate() .map(|(i, _)| get_param(fcx.llfn, fcx.arg_pos(i+1) as u32)) .collect(); assert!(!fcx.needs_ret_allocas); let dest = fcx.llretslotptr.get().map(|_| - expr::SaveIn(fcx.get_ret_slot(bcx, output_ty, "ret_slot")) + expr::SaveIn(fcx.get_ret_slot(bcx, sig.output, "ret_slot")) ); bcx = trans_call_inner(bcx, None, bare_fn_ty, |bcx, _| Callee { bcx: bcx, data: Fn(llfnpointer) }, - ArgVals(llargs[]), + ArgVals(llargs.index(&FullRange)), dest).bcx; - finish_fn(&fcx, bcx, output_ty); + finish_fn(&fcx, bcx, sig.output); ccx.fn_pointer_shims().borrow_mut().insert(bare_fn_ty, llfn); @@ -386,7 +384,7 @@ pub fn trans_fn_ref_with_substs<'a, 'tcx>( let _icx = push_ctxt("trans_fn_ref_with_substs"); let tcx = ccx.tcx(); - debug!("trans_fn_ref_with_substs(def_id={}, node={}, \ + debug!("trans_fn_ref_with_substs(def_id={}, node={:?}, \ param_substs={}, substs={})", def_id.repr(tcx), node, @@ -668,7 +666,10 @@ pub fn trans_call_inner<'a, 'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>, let mut bcx = callee.bcx; let (abi, ret_ty) = match callee_ty.sty { - ty::ty_bare_fn(_, ref f) => (f.abi, f.sig.0.output), + ty::ty_bare_fn(_, ref f) => { + let output = ty::erase_late_bound_regions(bcx.tcx(), &f.sig.output()); + (f.abi, output) + } _ => panic!("expected bare rust fn or closure in trans_call_inner") }; @@ -775,7 +776,7 @@ pub fn trans_call_inner<'a, 'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>, // Invoke the actual rust fn and update bcx/llresult. 
let (llret, b) = base::invoke(bcx, llfn, - llargs[], + llargs.index(&FullRange), callee_ty, call_info); bcx = b; @@ -814,7 +815,7 @@ pub fn trans_call_inner<'a, 'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>, bcx = foreign::trans_native_call(bcx, callee_ty, llfn, opt_llretslot.unwrap(), - llargs[], arg_tys); + llargs.index(&FullRange), arg_tys); } fcx.pop_and_trans_custom_cleanup_scope(bcx, arg_cleanup_scope); @@ -865,13 +866,18 @@ fn trans_args_under_call_abi<'blk, 'tcx>( llargs: &mut Vec, arg_cleanup_scope: cleanup::ScopeId, ignore_self: bool) - -> Block<'blk, 'tcx> { + -> Block<'blk, 'tcx> +{ + let args = + ty::erase_late_bound_regions( + bcx.tcx(), &ty::ty_fn_args(fn_ty)); + // Translate the `self` argument first. if !ignore_self { let arg_datum = unpack_datum!(bcx, expr::trans(bcx, &*arg_exprs[0])); llargs.push(unpack_result!(bcx, { trans_arg_datum(bcx, - ty::ty_fn_args(fn_ty)[0], + args[0], arg_datum, arg_cleanup_scope, DontAutorefArg) @@ -926,7 +932,7 @@ fn trans_overloaded_call_args<'blk, 'tcx>( ignore_self: bool) -> Block<'blk, 'tcx> { // Translate the `self` argument first. - let arg_tys = ty::ty_fn_args(fn_ty); + let arg_tys = ty::erase_late_bound_regions(bcx.tcx(), &ty::ty_fn_args(fn_ty)); if !ignore_self { let arg_datum = unpack_datum!(bcx, expr::trans(bcx, arg_exprs[0])); llargs.push(unpack_result!(bcx, { @@ -974,7 +980,7 @@ pub fn trans_args<'a, 'blk, 'tcx>(cx: Block<'blk, 'tcx>, debug!("trans_args(abi={})", abi); let _icx = push_ctxt("trans_args"); - let arg_tys = ty::ty_fn_args(fn_ty); + let arg_tys = ty::erase_late_bound_regions(cx.tcx(), &ty::ty_fn_args(fn_ty)); let variadic = ty::fn_is_variadic(fn_ty); let mut bcx = cx; diff --git a/src/librustc_trans/trans/cleanup.rs b/src/librustc_trans/trans/cleanup.rs index 79a5898e3d33e..92a96cd02b501 100644 --- a/src/librustc_trans/trans/cleanup.rs +++ b/src/librustc_trans/trans/cleanup.rs @@ -24,8 +24,7 @@ use trans::common; use trans::common::{Block, FunctionContext, ExprId, NodeInfo}; use trans::debuginfo; use trans::glue; -// Temporary due to slicing syntax hacks (KILLME) -//use middle::region; +use middle::region; use trans::type_::Type; use middle::ty::{self, Ty}; use std::fmt; @@ -129,8 +128,7 @@ impl<'blk, 'tcx> CleanupMethods<'blk, 'tcx> for FunctionContext<'blk, 'tcx> { // excluding id's that correspond to closure bodies only). For // now we just say that if there is already an AST scope on the stack, // this new AST scope had better be its immediate child. - // Temporarily removed due to slicing syntax hacks (KILLME). 
- /*let top_scope = self.top_ast_scope(); + let top_scope = self.top_ast_scope(); if top_scope.is_some() { assert_eq!(self.ccx .tcx() @@ -138,7 +136,7 @@ impl<'blk, 'tcx> CleanupMethods<'blk, 'tcx> for FunctionContext<'blk, 'tcx> { .opt_encl_scope(region::CodeExtent::from_node_id(debug_loc.id)) .map(|s|s.node_id()), top_scope); - }*/ + } self.push_scope(CleanupScope::new(AstScopeKind(debug_loc.id), Some(debug_loc))); @@ -229,7 +227,7 @@ impl<'blk, 'tcx> CleanupMethods<'blk, 'tcx> for FunctionContext<'blk, 'tcx> { bcx: Block<'blk, 'tcx>, custom_scope: CustomScopeIndex) -> Block<'blk, 'tcx> { - debug!("pop_and_trans_custom_cleanup_scope({})", custom_scope); + debug!("pop_and_trans_custom_cleanup_scope({:?})", custom_scope); assert!(self.is_valid_to_pop_custom_scope(custom_scope)); let scope = self.pop_scope(); @@ -267,7 +265,7 @@ impl<'blk, 'tcx> CleanupMethods<'blk, 'tcx> for FunctionContext<'blk, 'tcx> { ptr: val, }; - debug!("schedule_lifetime_end({}, val={})", + debug!("schedule_lifetime_end({:?}, val={})", cleanup_scope, self.ccx.tn().val_to_string(val)); @@ -288,7 +286,7 @@ impl<'blk, 'tcx> CleanupMethods<'blk, 'tcx> for FunctionContext<'blk, 'tcx> { zero: false }; - debug!("schedule_drop_mem({}, val={}, ty={})", + debug!("schedule_drop_mem({:?}, val={}, ty={})", cleanup_scope, self.ccx.tn().val_to_string(val), ty.repr(self.ccx.tcx())); @@ -310,7 +308,7 @@ impl<'blk, 'tcx> CleanupMethods<'blk, 'tcx> for FunctionContext<'blk, 'tcx> { zero: true }; - debug!("schedule_drop_and_zero_mem({}, val={}, ty={}, zero={})", + debug!("schedule_drop_and_zero_mem({:?}, val={}, ty={}, zero={})", cleanup_scope, self.ccx.tn().val_to_string(val), ty.repr(self.ccx.tcx()), @@ -334,7 +332,7 @@ impl<'blk, 'tcx> CleanupMethods<'blk, 'tcx> for FunctionContext<'blk, 'tcx> { zero: false }; - debug!("schedule_drop_immediate({}, val={}, ty={})", + debug!("schedule_drop_immediate({:?}, val={}, ty={:?})", cleanup_scope, self.ccx.tn().val_to_string(val), ty.repr(self.ccx.tcx())); @@ -350,7 +348,7 @@ impl<'blk, 'tcx> CleanupMethods<'blk, 'tcx> for FunctionContext<'blk, 'tcx> { content_ty: Ty<'tcx>) { let drop = box FreeValue { ptr: val, heap: heap, content_ty: content_ty }; - debug!("schedule_free_value({}, val={}, heap={})", + debug!("schedule_free_value({:?}, val={}, heap={:?})", cleanup_scope, self.ccx.tn().val_to_string(val), heap); @@ -367,7 +365,7 @@ impl<'blk, 'tcx> CleanupMethods<'blk, 'tcx> for FunctionContext<'blk, 'tcx> { heap: Heap) { let drop = box FreeSlice { ptr: val, size: size, align: align, heap: heap }; - debug!("schedule_free_slice({}, val={}, heap={})", + debug!("schedule_free_slice({:?}, val={}, heap={:?})", cleanup_scope, self.ccx.tn().val_to_string(val), heap); @@ -406,7 +404,7 @@ impl<'blk, 'tcx> CleanupMethods<'blk, 'tcx> for FunctionContext<'blk, 'tcx> { self.ccx.sess().bug( format!("no cleanup scope {} found", - self.ccx.tcx().map.node_to_string(cleanup_scope))[]); + self.ccx.tcx().map.node_to_string(cleanup_scope)).index(&FullRange)); } /// Schedules a cleanup to occur in the top-most scope, which must be a temporary scope. 
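(Aside: the pervasive `{}` → `{:?}` changes in the hunks above track the split of the old `Show` formatting trait into a value-facing form for `{}` and a debug form for `{:?}`; types that only derive the debug form must now be printed with `{:?}`. A minimal sketch of that distinction in present-day Rust terms — `ScopeId` here is a simplified stand-in, not the compiler's actual type:)

use std::fmt;

// A stand-in for an internal compiler type; not the real ScopeId.
#[derive(Debug)]
enum ScopeId {
    Ast(u32),
    Custom(usize),
}

// `{}` requires an explicit, user-facing impl...
impl fmt::Display for ScopeId {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            ScopeId::Ast(id) => write!(f, "ast scope {}", id),
            ScopeId::Custom(idx) => write!(f, "custom scope {}", idx),
        }
    }
}

fn main() {
    let scope = ScopeId::Ast(42);
    // ...while `{:?}` falls back to the derived debug representation.
    println!("{}", scope);   // prints: ast scope 42
    println!("{:?}", scope); // prints: Ast(42)
}

(Hence debug! lines that print types with no user-facing formatter — scope descriptors, heap kinds, and the like — switch to `{:?}` throughout this patch.)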
@@ -551,7 +549,7 @@ impl<'blk, 'tcx> CleanupHelperMethods<'blk, 'tcx> for FunctionContext<'blk, 'tcx fn trans_cleanups_to_exit_scope(&'blk self, label: EarlyExitLabel) -> BasicBlockRef { - debug!("trans_cleanups_to_exit_scope label={} scopes={}", + debug!("trans_cleanups_to_exit_scope label={:?} scopes={}", label, self.scopes_len()); let orig_scopes_len = self.scopes_len(); @@ -588,7 +586,7 @@ impl<'blk, 'tcx> CleanupHelperMethods<'blk, 'tcx> for FunctionContext<'blk, 'tcx LoopExit(id, _) => { self.ccx.sess().bug(format!( "cannot exit from scope {}, \ - not in scope", id)[]); + not in scope", id).index(&FullRange)); } } } @@ -657,7 +655,7 @@ impl<'blk, 'tcx> CleanupHelperMethods<'blk, 'tcx> for FunctionContext<'blk, 'tcx let name = scope.block_name("clean"); debug!("generating cleanups for {}", name); let bcx_in = self.new_block(label.is_unwind(), - name[], + name.index(&FullRange), None); let mut bcx_out = bcx_in; for cleanup in scope.cleanups.iter().rev() { @@ -677,7 +675,7 @@ impl<'blk, 'tcx> CleanupHelperMethods<'blk, 'tcx> for FunctionContext<'blk, 'tcx self.push_scope(scope); } - debug!("trans_cleanups_to_exit_scope: prev_llbb={}", prev_llbb); + debug!("trans_cleanups_to_exit_scope: prev_llbb={:?}", prev_llbb); assert_eq!(self.scopes_len(), orig_scopes_len); prev_llbb @@ -704,7 +702,7 @@ impl<'blk, 'tcx> CleanupHelperMethods<'blk, 'tcx> for FunctionContext<'blk, 'tcx Some(llbb) => { return llbb; } None => { let name = last_scope.block_name("unwind"); - pad_bcx = self.new_block(true, name[], None); + pad_bcx = self.new_block(true, name.index(&FullRange), None); last_scope.cached_landing_pad = Some(pad_bcx.llbb); } } @@ -1020,12 +1018,12 @@ pub fn temporary_scope(tcx: &ty::ctxt, match tcx.region_maps.temporary_scope(id) { Some(scope) => { let r = AstScope(scope.node_id()); - debug!("temporary_scope({}) = {}", id, r); + debug!("temporary_scope({}) = {:?}", id, r); r } None => { tcx.sess.bug(format!("no temporary scope available for expr {}", - id)[]) + id).index(&FullRange)) } } } @@ -1034,7 +1032,7 @@ pub fn var_scope(tcx: &ty::ctxt, id: ast::NodeId) -> ScopeId { let r = AstScope(tcx.region_maps.var_scope(id).node_id()); - debug!("var_scope({}) = {}", id, r); + debug!("var_scope({}) = {:?}", id, r); r } diff --git a/src/librustc_trans/trans/closure.rs b/src/librustc_trans/trans/closure.rs index 6f2def16e7674..ad2ed67b22c9a 100644 --- a/src/librustc_trans/trans/closure.rs +++ b/src/librustc_trans/trans/closure.rs @@ -108,7 +108,7 @@ pub struct EnvValue<'tcx> { impl<'tcx> EnvValue<'tcx> { pub fn to_string<'a>(&self, ccx: &CrateContext<'a, 'tcx>) -> String { - format!("{}({})", self.action, self.datum.to_string(ccx)) + format!("{:?}({})", self.action, self.datum.to_string(ccx)) } } @@ -154,7 +154,7 @@ pub fn store_environment<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let tcx = ccx.tcx(); // compute the type of the closure - let cdata_ty = mk_closure_tys(tcx, bound_values[]); + let cdata_ty = mk_closure_tys(tcx, bound_values.index(&FullRange)); // cbox_ty has the form of a tuple: (a, b, c) we want a ptr to a // tuple. 
This could be a ptr in uniq or a box or on stack, @@ -183,7 +183,7 @@ pub fn store_environment<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, if ccx.sess().asm_comments() { add_comment(bcx, format!("Copy {} into closure", - bv.to_string(ccx))[]); + bv.to_string(ccx)).index(&FullRange)); } let bound_data = GEPi(bcx, llbox, &[0u, abi::BOX_FIELD_BODY, i]); @@ -420,7 +420,7 @@ pub fn trans_expr_fn<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let s = tcx.map.with_path(id, |path| { mangle_internal_name_by_path_and_seq(path, "closure") }); - let llfn = decl_internal_rust_fn(ccx, fty, s[]); + let llfn = decl_internal_rust_fn(ccx, fty, s.index(&FullRange)); // set an inline hint for all closures set_inline_hint(llfn); @@ -442,9 +442,9 @@ pub fn trans_expr_fn<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, bcx.fcx.param_substs, id, &[], - ty::ty_fn_ret(fty), + ty::erase_late_bound_regions(ccx.tcx(), &ty::ty_fn_ret(fty)), ty::ty_fn_abi(fty), - ClosureEnv::new(freevars[], + ClosureEnv::new(freevars.index(&FullRange), BoxedClosure(cdata_ty, store))); fill_fn_pair(bcx, dest_addr, llfn, llbox); bcx @@ -466,7 +466,7 @@ pub fn get_or_create_declaration_if_unboxed_closure<'a, 'tcx>(ccx: &CrateContext // Normalize type so differences in regions and typedefs don't cause // duplicate declarations - let function_type = ty::normalize_ty(ccx.tcx(), function_type); + let function_type = normalize_ty(ccx.tcx(), function_type); let params = match function_type.sty { ty::ty_unboxed_closure(_, _, ref substs) => substs.types.clone(), _ => unreachable!() @@ -489,13 +489,13 @@ pub fn get_or_create_declaration_if_unboxed_closure<'a, 'tcx>(ccx: &CrateContext mangle_internal_name_by_path_and_seq(path, "unboxed_closure") }); - let llfn = decl_internal_rust_fn(ccx, function_type, symbol[]); + let llfn = decl_internal_rust_fn(ccx, function_type, symbol.index(&FullRange)); // set an inline hint for all closures set_inline_hint(llfn); debug!("get_or_create_declaration_if_unboxed_closure(): inserting new \ - closure {} (type {})", + closure {:?} (type {})", mono_id, ccx.tn().type_to_string(val_ty(llfn))); ccx.unboxed_closure_vals().borrow_mut().insert(mono_id, llfn); @@ -533,6 +533,8 @@ pub fn trans_unboxed_closure<'blk, 'tcx>( ty::with_freevars(bcx.tcx(), id, |fv| fv.iter().map(|&fv| fv).collect()); let freevar_mode = bcx.tcx().capture_mode(id); + let sig = ty::erase_late_bound_regions(bcx.tcx(), &function_type.sig); + trans_closure(bcx.ccx(), decl, body, @@ -540,9 +542,9 @@ pub fn trans_unboxed_closure<'blk, 'tcx>( bcx.fcx.param_substs, id, &[], - function_type.sig.0.output, + sig.output, function_type.abi, - ClosureEnv::new(freevars[], + ClosureEnv::new(freevars.index(&FullRange), UnboxedClosure(freevar_mode))); // Don't hoist this to the top of the function. It's perfectly legitimate @@ -579,3 +581,4 @@ pub fn trans_unboxed_closure<'blk, 'tcx>( bcx } + diff --git a/src/librustc_trans/trans/common.rs b/src/librustc_trans/trans/common.rs index 094f98e988aad..237fc1856369b 100644 --- a/src/librustc_trans/trans/common.rs +++ b/src/librustc_trans/trans/common.rs @@ -58,56 +58,111 @@ use util::nodemap::FnvHashSet; pub use trans::context::CrateContext; +/// Returns an equivalent type with all the typedefs and self regions removed. 
+pub fn normalize_ty<'tcx>(cx: &ty::ctxt<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> { + let u = TypeNormalizer(cx).fold_ty(ty); + debug!("normalize_ty({}) = {}", + ty.repr(cx), u.repr(cx)); + return u; + + struct TypeNormalizer<'a, 'tcx: 'a>(&'a ty::ctxt<'tcx>); + + impl<'a, 'tcx> TypeFolder<'tcx> for TypeNormalizer<'a, 'tcx> { + fn tcx(&self) -> &ty::ctxt<'tcx> { self.0 } + + fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> { + match self.tcx().normalized_cache.borrow().get(&ty).cloned() { + None => {} + Some(u) => return u + } + + let t_norm = ty_fold::super_fold_ty(self, ty); + self.tcx().normalized_cache.borrow_mut().insert(ty, t_norm); + return t_norm; + } + + fn fold_binder(&mut self, t: &ty::Binder) -> ty::Binder + where T : TypeFoldable<'tcx> + Repr<'tcx> + { + // FIXME(#20526) this should replace `enter_region_binder`/`exit_region_binder`. + let u = ty::anonymize_late_bound_regions(self.tcx(), t); + ty_fold::super_fold_binder(self, &u) + } + + fn fold_region(&mut self, r: ty::Region) -> ty::Region { + // because late-bound regions affect subtyping, we can't + // erase the bound/free distinction, but we can replace + // all free regions with 'static. + // + // Note that we *CAN* replace early-bound regions -- the + // type system never "sees" those, they get substituted + // away. In trans, they will always be erased to 'static + // whenever a substitution occurs. + match r { + ty::ReLateBound(..) => r, + _ => ty::ReStatic + } + } + + fn fold_substs(&mut self, + substs: &subst::Substs<'tcx>) + -> subst::Substs<'tcx> { + subst::Substs { regions: subst::ErasedRegions, + types: substs.types.fold_with(self) } + } + } +} + // Is the type's representation size known at compile time? pub fn type_is_sized<'tcx>(cx: &ty::ctxt<'tcx>, ty: Ty<'tcx>) -> bool { - ty::type_contents(cx, ty).is_sized(cx) +ty::type_contents(cx, ty).is_sized(cx) } pub fn lltype_is_sized<'tcx>(cx: &ty::ctxt<'tcx>, ty: Ty<'tcx>) -> bool { - match ty.sty { - ty::ty_open(_) => true, - _ => type_is_sized(cx, ty), - } +match ty.sty { + ty::ty_open(_) => true, + _ => type_is_sized(cx, ty), +} } pub fn type_is_fat_ptr<'tcx>(cx: &ty::ctxt<'tcx>, ty: Ty<'tcx>) -> bool { - match ty.sty { - ty::ty_ptr(ty::mt{ty, ..}) | - ty::ty_rptr(_, ty::mt{ty, ..}) | - ty::ty_uniq(ty) => { - !type_is_sized(cx, ty) - } - _ => { - false - } +match ty.sty { + ty::ty_ptr(ty::mt{ty, ..}) | + ty::ty_rptr(_, ty::mt{ty, ..}) | + ty::ty_uniq(ty) => { + !type_is_sized(cx, ty) } + _ => { + false + } +} } // Return the smallest part of `ty` which is unsized. Fails if `ty` is sized. // 'Smallest' here means component of the static representation of the type; not // the size of an object at runtime. pub fn unsized_part_of_type<'tcx>(cx: &ty::ctxt<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> { - match ty.sty { - ty::ty_str | ty::ty_trait(..) | ty::ty_vec(..) => ty, - ty::ty_struct(def_id, substs) => { - let unsized_fields: Vec<_> = - ty::struct_fields(cx, def_id, substs) - .iter() - .map(|f| f.mt.ty) - .filter(|ty| !type_is_sized(cx, *ty)) - .collect(); - - // Exactly one of the fields must be unsized. - assert!(unsized_fields.len() == 1); - - unsized_part_of_type(cx, unsized_fields[0]) - } - _ => { - assert!(type_is_sized(cx, ty), - "unsized_part_of_type failed even though ty is unsized"); - panic!("called unsized_part_of_type with sized ty"); - } +match ty.sty { + ty::ty_str | ty::ty_trait(..) | ty::ty_vec(..) 
=> ty, + ty::ty_struct(def_id, substs) => { + let unsized_fields: Vec<_> = + ty::struct_fields(cx, def_id, substs) + .iter() + .map(|f| f.mt.ty) + .filter(|ty| !type_is_sized(cx, *ty)) + .collect(); + + // Exactly one of the fields must be unsized. + assert!(unsized_fields.len() == 1); + + unsized_part_of_type(cx, unsized_fields[0]) } + _ => { + assert!(type_is_sized(cx, ty), + "unsized_part_of_type failed even though ty is unsized"); + panic!("called unsized_part_of_type with sized ty"); + } +} } // Some things don't need cleanups during unwinding because the @@ -115,93 +170,93 @@ pub fn unsized_part_of_type<'tcx>(cx: &ty::ctxt<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> // that only contain scalars and shared boxes can avoid unwind // cleanups. pub fn type_needs_unwind_cleanup<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, ty: Ty<'tcx>) -> bool { - return memoized(ccx.needs_unwind_cleanup_cache(), ty, |ty| { - type_needs_unwind_cleanup_(ccx.tcx(), ty, &mut FnvHashSet::new()) - }); +return memoized(ccx.needs_unwind_cleanup_cache(), ty, |ty| { + type_needs_unwind_cleanup_(ccx.tcx(), ty, &mut FnvHashSet::new()) +}); + +fn type_needs_unwind_cleanup_<'tcx>(tcx: &ty::ctxt<'tcx>, + ty: Ty<'tcx>, + tycache: &mut FnvHashSet>) + -> bool +{ + // Prevent infinite recursion + if !tycache.insert(ty) { + return false; + } - fn type_needs_unwind_cleanup_<'tcx>(tcx: &ty::ctxt<'tcx>, - ty: Ty<'tcx>, - tycache: &mut FnvHashSet>) - -> bool - { - // Prevent infinite recursion - if !tycache.insert(ty) { - return false; - } + let mut needs_unwind_cleanup = false; + ty::maybe_walk_ty(ty, |ty| { + needs_unwind_cleanup |= match ty.sty { + ty::ty_bool | ty::ty_int(_) | ty::ty_uint(_) | + ty::ty_float(_) | ty::ty_tup(_) | ty::ty_ptr(_) => false, - let mut needs_unwind_cleanup = false; - ty::maybe_walk_ty(ty, |ty| { - needs_unwind_cleanup |= match ty.sty { - ty::ty_bool | ty::ty_int(_) | ty::ty_uint(_) | - ty::ty_float(_) | ty::ty_tup(_) | ty::ty_ptr(_) => false, - - ty::ty_enum(did, substs) => - ty::enum_variants(tcx, did).iter().any(|v| - v.args.iter().any(|&aty| { - let t = aty.subst(tcx, substs); - type_needs_unwind_cleanup_(tcx, t, tycache) - }) - ), - - _ => true - }; - !needs_unwind_cleanup - }); - needs_unwind_cleanup - } + ty::ty_enum(did, substs) => + ty::enum_variants(tcx, did).iter().any(|v| + v.args.iter().any(|&aty| { + let t = aty.subst(tcx, substs); + type_needs_unwind_cleanup_(tcx, t, tycache) + }) + ), + + _ => true + }; + !needs_unwind_cleanup + }); + needs_unwind_cleanup +} } pub fn type_needs_drop<'tcx>(cx: &ty::ctxt<'tcx>, - ty: Ty<'tcx>) - -> bool { - ty::type_contents(cx, ty).needs_drop(cx) + ty: Ty<'tcx>) + -> bool { +ty::type_contents(cx, ty).needs_drop(cx) } fn type_is_newtype_immediate<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, - ty: Ty<'tcx>) -> bool { - match ty.sty { - ty::ty_struct(def_id, substs) => { - let fields = ty::struct_fields(ccx.tcx(), def_id, substs); - fields.len() == 1 && - fields[0].name == - token::special_idents::unnamed_field.name && - type_is_immediate(ccx, fields[0].mt.ty) - } - _ => false + ty: Ty<'tcx>) -> bool { +match ty.sty { + ty::ty_struct(def_id, substs) => { + let fields = ty::struct_fields(ccx.tcx(), def_id, substs); + fields.len() == 1 && + fields[0].name == + token::special_idents::unnamed_field.name && + type_is_immediate(ccx, fields[0].mt.ty) } + _ => false +} } pub fn type_is_immediate<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, ty: Ty<'tcx>) -> bool { - use trans::machine::llsize_of_alloc; - use trans::type_of::sizing_type_of; - - let tcx = ccx.tcx(); - let simple = 
ty::type_is_scalar(ty) || - ty::type_is_unique(ty) || ty::type_is_region_ptr(ty) || - type_is_newtype_immediate(ccx, ty) || - ty::type_is_simd(tcx, ty); - if simple && !type_is_fat_ptr(tcx, ty) { - return true; - } - if !type_is_sized(tcx, ty) { - return false; - } - match ty.sty { - ty::ty_struct(..) | ty::ty_enum(..) | ty::ty_tup(..) | - ty::ty_unboxed_closure(..) => { - let llty = sizing_type_of(ccx, ty); - llsize_of_alloc(ccx, llty) <= llsize_of_alloc(ccx, ccx.int_type()) - } - _ => type_is_zero_size(ccx, ty) +use trans::machine::llsize_of_alloc; +use trans::type_of::sizing_type_of; + +let tcx = ccx.tcx(); +let simple = ty::type_is_scalar(ty) || + ty::type_is_unique(ty) || ty::type_is_region_ptr(ty) || + type_is_newtype_immediate(ccx, ty) || + ty::type_is_simd(tcx, ty); +if simple && !type_is_fat_ptr(tcx, ty) { + return true; +} +if !type_is_sized(tcx, ty) { + return false; +} +match ty.sty { + ty::ty_struct(..) | ty::ty_enum(..) | ty::ty_tup(..) | + ty::ty_unboxed_closure(..) => { + let llty = sizing_type_of(ccx, ty); + llsize_of_alloc(ccx, llty) <= llsize_of_alloc(ccx, ccx.int_type()) } + _ => type_is_zero_size(ccx, ty) +} } /// Identify types which have size zero at runtime. pub fn type_is_zero_size<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, ty: Ty<'tcx>) -> bool { - use trans::machine::llsize_of_alloc; - use trans::type_of::sizing_type_of; - let llty = sizing_type_of(ccx, ty); - llsize_of_alloc(ccx, llty) == 0 +use trans::machine::llsize_of_alloc; +use trans::type_of::sizing_type_of; +let llty = sizing_type_of(ccx, ty); +llsize_of_alloc(ccx, llty) == 0 } /// Identifies types which we declare to be equivalent to `void` in C for the purpose of function @@ -209,85 +264,85 @@ pub fn type_is_zero_size<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, ty: Ty<'tcx>) - /// zero-size, but not all zero-size types use a `void` return type (in order to aid with C ABI /// compatibility). pub fn return_type_is_void(ccx: &CrateContext, ty: Ty) -> bool { - ty::type_is_nil(ty) || ty::type_is_empty(ccx.tcx(), ty) +ty::type_is_nil(ty) || ty::type_is_empty(ccx.tcx(), ty) } /// Generates a unique symbol based off the name given. This is used to create /// unique symbols for things like closures. pub fn gensym_name(name: &str) -> PathElem { - let num = token::gensym(name).uint(); - // use one colon which will get translated to a period by the mangler, and - // we're guaranteed that `num` is globally unique for this crate. - PathName(token::gensym(format!("{}:{}", name, num)[])) +let num = token::gensym(name).uint(); +// use one colon which will get translated to a period by the mangler, and +// we're guaranteed that `num` is globally unique for this crate. +PathName(token::gensym(format!("{}:{}", name, num).index(&FullRange))) } #[derive(Copy)] pub struct tydesc_info<'tcx> { - pub ty: Ty<'tcx>, - pub tydesc: ValueRef, - pub size: ValueRef, - pub align: ValueRef, - pub name: ValueRef, +pub ty: Ty<'tcx>, +pub tydesc: ValueRef, +pub size: ValueRef, +pub align: ValueRef, +pub name: ValueRef, } /* - * A note on nomenclature of linking: "extern", "foreign", and "upcall". - * - * An "extern" is an LLVM symbol we wind up emitting an undefined external - * reference to. This means "we don't have the thing in this compilation unit, - * please make sure you link it in at runtime". This could be a reference to - * C code found in a C library, or rust code found in a rust crate. 
- * - * Most "externs" are implicitly declared (automatically) as a result of a - * user declaring an extern _module_ dependency; this causes the rust driver - * to locate an extern crate, scan its compilation metadata, and emit extern - * declarations for any symbols used by the declaring crate. - * - * A "foreign" is an extern that references C (or other non-rust ABI) code. - * There is no metadata to scan for extern references so in these cases either - * a header-digester like bindgen, or manual function prototypes, have to - * serve as declarators. So these are usually given explicitly as prototype - * declarations, in rust code, with ABI attributes on them noting which ABI to - * link via. - * - * An "upcall" is a foreign call generated by the compiler (not corresponding - * to any user-written call in the code) into the runtime library, to perform - * some helper task such as bringing a task to life, allocating memory, etc. - * - */ +* A note on nomenclature of linking: "extern", "foreign", and "upcall". +* +* An "extern" is an LLVM symbol we wind up emitting an undefined external +* reference to. This means "we don't have the thing in this compilation unit, +* please make sure you link it in at runtime". This could be a reference to +* C code found in a C library, or rust code found in a rust crate. +* +* Most "externs" are implicitly declared (automatically) as a result of a +* user declaring an extern _module_ dependency; this causes the rust driver +* to locate an extern crate, scan its compilation metadata, and emit extern +* declarations for any symbols used by the declaring crate. +* +* A "foreign" is an extern that references C (or other non-rust ABI) code. +* There is no metadata to scan for extern references so in these cases either +* a header-digester like bindgen, or manual function prototypes, have to +* serve as declarators. So these are usually given explicitly as prototype +* declarations, in rust code, with ABI attributes on them noting which ABI to +* link via. +* +* An "upcall" is a foreign call generated by the compiler (not corresponding +* to any user-written call in the code) into the runtime library, to perform +* some helper task such as bringing a task to life, allocating memory, etc. +* +*/ #[derive(Copy)] pub struct NodeInfo { - pub id: ast::NodeId, - pub span: Span, +pub id: ast::NodeId, +pub span: Span, } pub fn expr_info(expr: &ast::Expr) -> NodeInfo { - NodeInfo { id: expr.id, span: expr.span } +NodeInfo { id: expr.id, span: expr.span } } pub struct BuilderRef_res { - pub b: BuilderRef, +pub b: BuilderRef, } impl Drop for BuilderRef_res { - fn drop(&mut self) { - unsafe { - llvm::LLVMDisposeBuilder(self.b); - } +fn drop(&mut self) { + unsafe { + llvm::LLVMDisposeBuilder(self.b); } } +} pub fn BuilderRef_res(b: BuilderRef) -> BuilderRef_res { - BuilderRef_res { - b: b - } +BuilderRef_res { + b: b +} } pub type ExternMap = FnvHashMap; pub fn validate_substs(substs: &Substs) { - assert!(substs.types.all(|t| !ty::type_needs_infer(*t))); +assert!(substs.types.all(|t| !ty::type_needs_infer(*t))); } // work around bizarre resolve errors @@ -297,183 +352,183 @@ type LvalueDatum<'tcx> = datum::Datum<'tcx, datum::Lvalue>; // Function context. Every LLVM function we create will have one of // these. 
pub struct FunctionContext<'a, 'tcx: 'a> { - // The ValueRef returned from a call to llvm::LLVMAddFunction; the - // address of the first instruction in the sequence of - // instructions for this function that will go in the .text - // section of the executable we're generating. - pub llfn: ValueRef, +// The ValueRef returned from a call to llvm::LLVMAddFunction; the +// address of the first instruction in the sequence of +// instructions for this function that will go in the .text +// section of the executable we're generating. +pub llfn: ValueRef, - // always an empty parameter-environment - pub param_env: ty::ParameterEnvironment<'a, 'tcx>, +// always an empty parameter-environment +pub param_env: ty::ParameterEnvironment<'a, 'tcx>, - // The environment argument in a closure. - pub llenv: Option, +// The environment argument in a closure. +pub llenv: Option, - // A pointer to where to store the return value. If the return type is - // immediate, this points to an alloca in the function. Otherwise, it's a - // pointer to the hidden first parameter of the function. After function - // construction, this should always be Some. - pub llretslotptr: Cell>, +// A pointer to where to store the return value. If the return type is +// immediate, this points to an alloca in the function. Otherwise, it's a +// pointer to the hidden first parameter of the function. After function +// construction, this should always be Some. +pub llretslotptr: Cell>, - // These pub elements: "hoisted basic blocks" containing - // administrative activities that have to happen in only one place in - // the function, due to LLVM's quirks. - // A marker for the place where we want to insert the function's static - // allocas, so that LLVM will coalesce them into a single alloca call. - pub alloca_insert_pt: Cell>, - pub llreturn: Cell>, +// These pub elements: "hoisted basic blocks" containing +// administrative activities that have to happen in only one place in +// the function, due to LLVM's quirks. +// A marker for the place where we want to insert the function's static +// allocas, so that LLVM will coalesce them into a single alloca call. +pub alloca_insert_pt: Cell>, +pub llreturn: Cell>, - // If the function has any nested return's, including something like: - // fn foo() -> Option { Some(Foo { x: return None }) }, then - // we use a separate alloca for each return - pub needs_ret_allocas: bool, +// If the function has any nested return's, including something like: +// fn foo() -> Option { Some(Foo { x: return None }) }, then +// we use a separate alloca for each return +pub needs_ret_allocas: bool, - // The a value alloca'd for calls to upcalls.rust_personality. Used when - // outputting the resume instruction. - pub personality: Cell>, +// The a value alloca'd for calls to upcalls.rust_personality. Used when +// outputting the resume instruction. +pub personality: Cell>, - // True if the caller expects this fn to use the out pointer to - // return. Either way, your code should write into the slot llretslotptr - // points to, but if this value is false, that slot will be a local alloca. - pub caller_expects_out_pointer: bool, +// True if the caller expects this fn to use the out pointer to +// return. Either way, your code should write into the slot llretslotptr +// points to, but if this value is false, that slot will be a local alloca. +pub caller_expects_out_pointer: bool, - // Maps the DefId's for local variables to the allocas created for - // them in llallocas. 
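// A rough standalone sketch of the argument-index bookkeeping done by
// `arg_pos`/`env_arg_pos` further down: a hidden return-slot pointer (when the
// caller expects one) and the closure environment pointer (when present) come
// before the user-visible arguments. Plain functions with toy flags, not the
// real FunctionContext methods.
fn env_arg_pos(caller_expects_out_pointer: bool) -> usize {
    if caller_expects_out_pointer { 1 } else { 0 }
}

fn arg_pos(caller_expects_out_pointer: bool, has_env: bool, arg: usize) -> usize {
    let arg = env_arg_pos(caller_expects_out_pointer) + arg;
    if has_env { arg + 1 } else { arg }
}

fn main() {
    // Out-pointer at index 0, environment at index 1, first user argument at 2.
    assert_eq!(arg_pos(true, true, 0), 2);
    // Neither present: user arguments start at index 0.
    assert_eq!(arg_pos(false, false, 0), 0);
}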
- pub lllocals: RefCell>>, +// Maps the DefId's for local variables to the allocas created for +// them in llallocas. +pub lllocals: RefCell>>, - // Same as above, but for closure upvars - pub llupvars: RefCell>, +// Same as above, but for closure upvars +pub llupvars: RefCell>, - // The NodeId of the function, or -1 if it doesn't correspond to - // a user-defined function. - pub id: ast::NodeId, +// The NodeId of the function, or -1 if it doesn't correspond to +// a user-defined function. +pub id: ast::NodeId, - // If this function is being monomorphized, this contains the type - // substitutions used. - pub param_substs: &'a Substs<'tcx>, +// If this function is being monomorphized, this contains the type +// substitutions used. +pub param_substs: &'a Substs<'tcx>, - // The source span and nesting context where this function comes from, for - // error reporting and symbol generation. - pub span: Option, +// The source span and nesting context where this function comes from, for +// error reporting and symbol generation. +pub span: Option, - // The arena that blocks are allocated from. - pub block_arena: &'a TypedArena>, +// The arena that blocks are allocated from. +pub block_arena: &'a TypedArena>, - // This function's enclosing crate context. - pub ccx: &'a CrateContext<'a, 'tcx>, +// This function's enclosing crate context. +pub ccx: &'a CrateContext<'a, 'tcx>, - // Used and maintained by the debuginfo module. - pub debug_context: debuginfo::FunctionDebugContext, +// Used and maintained by the debuginfo module. +pub debug_context: debuginfo::FunctionDebugContext, - // Cleanup scopes. - pub scopes: RefCell>>, +// Cleanup scopes. +pub scopes: RefCell>>, - pub cfg: Option, +pub cfg: Option, } impl<'a, 'tcx> FunctionContext<'a, 'tcx> { - pub fn arg_pos(&self, arg: uint) -> uint { - let arg = self.env_arg_pos() + arg; - if self.llenv.is_some() { - arg + 1 - } else { - arg - } +pub fn arg_pos(&self, arg: uint) -> uint { + let arg = self.env_arg_pos() + arg; + if self.llenv.is_some() { + arg + 1 + } else { + arg } +} - pub fn env_arg_pos(&self) -> uint { - if self.caller_expects_out_pointer { - 1u - } else { - 0u - } +pub fn env_arg_pos(&self) -> uint { + if self.caller_expects_out_pointer { + 1u + } else { + 0u } +} - pub fn cleanup(&self) { - unsafe { - llvm::LLVMInstructionEraseFromParent(self.alloca_insert_pt - .get() - .unwrap()); - } +pub fn cleanup(&self) { + unsafe { + llvm::LLVMInstructionEraseFromParent(self.alloca_insert_pt + .get() + .unwrap()); } +} - pub fn get_llreturn(&self) -> BasicBlockRef { - if self.llreturn.get().is_none() { - - self.llreturn.set(Some(unsafe { - llvm::LLVMAppendBasicBlockInContext(self.ccx.llcx(), self.llfn, - "return\0".as_ptr() as *const _) - })) - } +pub fn get_llreturn(&self) -> BasicBlockRef { + if self.llreturn.get().is_none() { - self.llreturn.get().unwrap() + self.llreturn.set(Some(unsafe { + llvm::LLVMAppendBasicBlockInContext(self.ccx.llcx(), self.llfn, + "return\0".as_ptr() as *const _) + })) } - pub fn get_ret_slot(&self, bcx: Block<'a, 'tcx>, - output: ty::FnOutput<'tcx>, - name: &str) -> ValueRef { - if self.needs_ret_allocas { - base::alloca_no_lifetime(bcx, match output { - ty::FnConverging(output_type) => type_of::type_of(bcx.ccx(), output_type), - ty::FnDiverging => Type::void(bcx.ccx()) - }, name) - } else { - self.llretslotptr.get().unwrap() - } - } + self.llreturn.get().unwrap() +} - pub fn new_block(&'a self, - is_lpad: bool, - name: &str, - opt_node_id: Option) - -> Block<'a, 'tcx> { - unsafe { - let name = 
CString::from_slice(name.as_bytes()); - let llbb = llvm::LLVMAppendBasicBlockInContext(self.ccx.llcx(), - self.llfn, - name.as_ptr()); - BlockS::new(llbb, is_lpad, opt_node_id, self) - } +pub fn get_ret_slot(&self, bcx: Block<'a, 'tcx>, + output: ty::FnOutput<'tcx>, + name: &str) -> ValueRef { + if self.needs_ret_allocas { + base::alloca_no_lifetime(bcx, match output { + ty::FnConverging(output_type) => type_of::type_of(bcx.ccx(), output_type), + ty::FnDiverging => Type::void(bcx.ccx()) + }, name) + } else { + self.llretslotptr.get().unwrap() } +} - pub fn new_id_block(&'a self, - name: &str, - node_id: ast::NodeId) - -> Block<'a, 'tcx> { - self.new_block(false, name, Some(node_id)) +pub fn new_block(&'a self, + is_lpad: bool, + name: &str, + opt_node_id: Option) + -> Block<'a, 'tcx> { + unsafe { + let name = CString::from_slice(name.as_bytes()); + let llbb = llvm::LLVMAppendBasicBlockInContext(self.ccx.llcx(), + self.llfn, + name.as_ptr()); + BlockS::new(llbb, is_lpad, opt_node_id, self) } +} - pub fn new_temp_block(&'a self, - name: &str) - -> Block<'a, 'tcx> { - self.new_block(false, name, None) - } +pub fn new_id_block(&'a self, + name: &str, + node_id: ast::NodeId) + -> Block<'a, 'tcx> { + self.new_block(false, name, Some(node_id)) +} - pub fn join_blocks(&'a self, - id: ast::NodeId, - in_cxs: &[Block<'a, 'tcx>]) - -> Block<'a, 'tcx> { - let out = self.new_id_block("join", id); - let mut reachable = false; - for bcx in in_cxs.iter() { - if !bcx.unreachable.get() { - build::Br(*bcx, out.llbb); - reachable = true; - } - } - if !reachable { - build::Unreachable(out); +pub fn new_temp_block(&'a self, + name: &str) + -> Block<'a, 'tcx> { + self.new_block(false, name, None) +} + +pub fn join_blocks(&'a self, + id: ast::NodeId, + in_cxs: &[Block<'a, 'tcx>]) + -> Block<'a, 'tcx> { + let out = self.new_id_block("join", id); + let mut reachable = false; + for bcx in in_cxs.iter() { + if !bcx.unreachable.get() { + build::Br(*bcx, out.llbb); + reachable = true; } - return out; } - - pub fn monomorphize(&self, value: &T) -> T - where T : TypeFoldable<'tcx> + Repr<'tcx> + HasProjectionTypes + Clone - { - monomorphize::apply_param_substs(self.ccx.tcx(), - self.param_substs, - value) + if !reachable { + build::Unreachable(out); } + return out; +} + +pub fn monomorphize(&self, value: &T) -> T + where T : TypeFoldable<'tcx> + Repr<'tcx> + HasProjectionTypes + Clone +{ + monomorphize::apply_param_substs(self.ccx.tcx(), + self.param_substs, + value) +} } // Basic block context. We create a block context for each basic block @@ -482,277 +537,277 @@ impl<'a, 'tcx> FunctionContext<'a, 'tcx> { // with many basic blocks per function. All the basic blocks attached to a // function are organized as a directed graph. pub struct BlockS<'blk, 'tcx: 'blk> { - // The BasicBlockRef returned from a call to - // llvm::LLVMAppendBasicBlock(llfn, name), which adds a basic - // block to the function pointed to by llfn. We insert - // instructions into that block by way of this block context. - // The block pointing to this one in the function's digraph. - pub llbb: BasicBlockRef, - pub terminated: Cell, - pub unreachable: Cell, +// The BasicBlockRef returned from a call to +// llvm::LLVMAppendBasicBlock(llfn, name), which adds a basic +// block to the function pointed to by llfn. We insert +// instructions into that block by way of this block context. +// The block pointing to this one in the function's digraph. 
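// A rough standalone sketch of what `join_blocks` above does: create one
// "join" block, branch every still-reachable input block into it, and mark
// the join unreachable if nothing reaches it. Toy types over block indices,
// not rustc's Block/BasicBlockRef.
#[derive(Default)]
struct ToyBlock {
    branches_to: Option<usize>, // successor set by the toy `Br`
    unreachable: bool,
}

fn join_blocks(blocks: &mut Vec<ToyBlock>, inputs: &[usize]) -> usize {
    let join = blocks.len();
    blocks.push(ToyBlock::default());
    let mut reachable = false;
    for &i in inputs {
        if !blocks[i].unreachable {
            blocks[i].branches_to = Some(join); // Br(*bcx, out.llbb)
            reachable = true;
        }
    }
    if !reachable {
        blocks[join].unreachable = true; // Unreachable(out)
    }
    join
}

fn main() {
    let mut cfg = vec![
        ToyBlock::default(),
        ToyBlock { unreachable: true, ..ToyBlock::default() },
    ];
    let join = join_blocks(&mut cfg, &[0, 1]);
    assert_eq!(cfg[0].branches_to, Some(join));
    assert_eq!(cfg[1].branches_to, None);
}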
+pub llbb: BasicBlockRef, +pub terminated: Cell, +pub unreachable: Cell, - // Is this block part of a landing pad? - pub is_lpad: bool, +// Is this block part of a landing pad? +pub is_lpad: bool, - // AST node-id associated with this block, if any. Used for - // debugging purposes only. - pub opt_node_id: Option, +// AST node-id associated with this block, if any. Used for +// debugging purposes only. +pub opt_node_id: Option, - // The function context for the function to which this block is - // attached. - pub fcx: &'blk FunctionContext<'blk, 'tcx>, +// The function context for the function to which this block is +// attached. +pub fcx: &'blk FunctionContext<'blk, 'tcx>, } pub type Block<'blk, 'tcx> = &'blk BlockS<'blk, 'tcx>; impl<'blk, 'tcx> BlockS<'blk, 'tcx> { - pub fn new(llbb: BasicBlockRef, - is_lpad: bool, - opt_node_id: Option, - fcx: &'blk FunctionContext<'blk, 'tcx>) - -> Block<'blk, 'tcx> { - fcx.block_arena.alloc(BlockS { - llbb: llbb, - terminated: Cell::new(false), - unreachable: Cell::new(false), - is_lpad: is_lpad, - opt_node_id: opt_node_id, - fcx: fcx - }) - } +pub fn new(llbb: BasicBlockRef, + is_lpad: bool, + opt_node_id: Option, + fcx: &'blk FunctionContext<'blk, 'tcx>) + -> Block<'blk, 'tcx> { + fcx.block_arena.alloc(BlockS { + llbb: llbb, + terminated: Cell::new(false), + unreachable: Cell::new(false), + is_lpad: is_lpad, + opt_node_id: opt_node_id, + fcx: fcx + }) +} - pub fn ccx(&self) -> &'blk CrateContext<'blk, 'tcx> { - self.fcx.ccx - } - pub fn tcx(&self) -> &'blk ty::ctxt<'tcx> { - self.fcx.ccx.tcx() - } - pub fn sess(&self) -> &'blk Session { self.fcx.ccx.sess() } +pub fn ccx(&self) -> &'blk CrateContext<'blk, 'tcx> { + self.fcx.ccx +} +pub fn tcx(&self) -> &'blk ty::ctxt<'tcx> { + self.fcx.ccx.tcx() +} +pub fn sess(&self) -> &'blk Session { self.fcx.ccx.sess() } - pub fn ident(&self, ident: Ident) -> String { - token::get_ident(ident).get().to_string() - } +pub fn ident(&self, ident: Ident) -> String { + token::get_ident(ident).get().to_string() +} - pub fn node_id_to_string(&self, id: ast::NodeId) -> String { - self.tcx().map.node_to_string(id).to_string() - } +pub fn node_id_to_string(&self, id: ast::NodeId) -> String { + self.tcx().map.node_to_string(id).to_string() +} - pub fn expr_to_string(&self, e: &ast::Expr) -> String { - e.repr(self.tcx()) - } +pub fn expr_to_string(&self, e: &ast::Expr) -> String { + e.repr(self.tcx()) +} - pub fn def(&self, nid: ast::NodeId) -> def::Def { - match self.tcx().def_map.borrow().get(&nid) { - Some(v) => v.clone(), - None => { - self.tcx().sess.bug(format!( - "no def associated with node id {}", nid)[]); - } +pub fn def(&self, nid: ast::NodeId) -> def::Def { + match self.tcx().def_map.borrow().get(&nid) { + Some(v) => v.clone(), + None => { + self.tcx().sess.bug(format!( + "no def associated with node id {}", nid).index(&FullRange)); } } +} - pub fn val_to_string(&self, val: ValueRef) -> String { - self.ccx().tn().val_to_string(val) - } +pub fn val_to_string(&self, val: ValueRef) -> String { + self.ccx().tn().val_to_string(val) +} - pub fn llty_str(&self, ty: Type) -> String { - self.ccx().tn().type_to_string(ty) - } +pub fn llty_str(&self, ty: Type) -> String { + self.ccx().tn().type_to_string(ty) +} - pub fn ty_to_string(&self, t: Ty<'tcx>) -> String { - t.repr(self.tcx()) - } +pub fn ty_to_string(&self, t: Ty<'tcx>) -> String { + t.repr(self.tcx()) +} - pub fn to_str(&self) -> String { - format!("[block {:p}]", self) - } +pub fn to_str(&self) -> String { + format!("[block {:p}]", self) +} - pub fn 
monomorphize(&self, value: &T) -> T - where T : TypeFoldable<'tcx> + Repr<'tcx> + HasProjectionTypes + Clone - { - monomorphize::apply_param_substs(self.tcx(), - self.fcx.param_substs, - value) - } +pub fn monomorphize(&self, value: &T) -> T + where T : TypeFoldable<'tcx> + Repr<'tcx> + HasProjectionTypes + Clone +{ + monomorphize::apply_param_substs(self.tcx(), + self.fcx.param_substs, + value) +} } impl<'blk, 'tcx> mc::Typer<'tcx> for BlockS<'blk, 'tcx> { - fn tcx<'a>(&'a self) -> &'a ty::ctxt<'tcx> { - self.tcx() - } +fn tcx<'a>(&'a self) -> &'a ty::ctxt<'tcx> { + self.tcx() +} - fn node_ty(&self, id: ast::NodeId) -> mc::McResult> { - Ok(node_id_type(self, id)) - } +fn node_ty(&self, id: ast::NodeId) -> mc::McResult> { + Ok(node_id_type(self, id)) +} - fn expr_ty_adjusted(&self, expr: &ast::Expr) -> mc::McResult> { - Ok(expr_ty_adjusted(self, expr)) - } +fn expr_ty_adjusted(&self, expr: &ast::Expr) -> mc::McResult> { + Ok(expr_ty_adjusted(self, expr)) +} - fn node_method_ty(&self, method_call: ty::MethodCall) -> Option> { - self.tcx() - .method_map - .borrow() - .get(&method_call) - .map(|method| monomorphize_type(self, method.ty)) - } +fn node_method_ty(&self, method_call: ty::MethodCall) -> Option> { + self.tcx() + .method_map + .borrow() + .get(&method_call) + .map(|method| monomorphize_type(self, method.ty)) +} - fn node_method_origin(&self, method_call: ty::MethodCall) - -> Option> - { - self.tcx() - .method_map - .borrow() - .get(&method_call) - .map(|method| method.origin.clone()) - } +fn node_method_origin(&self, method_call: ty::MethodCall) + -> Option> +{ + self.tcx() + .method_map + .borrow() + .get(&method_call) + .map(|method| method.origin.clone()) +} - fn adjustments<'a>(&'a self) -> &'a RefCell>> { - &self.tcx().adjustments - } +fn adjustments<'a>(&'a self) -> &'a RefCell>> { + &self.tcx().adjustments +} - fn is_method_call(&self, id: ast::NodeId) -> bool { - self.tcx().method_map.borrow().contains_key(&ty::MethodCall::expr(id)) - } +fn is_method_call(&self, id: ast::NodeId) -> bool { + self.tcx().method_map.borrow().contains_key(&ty::MethodCall::expr(id)) +} - fn temporary_scope(&self, rvalue_id: ast::NodeId) -> Option { - self.tcx().region_maps.temporary_scope(rvalue_id) - } +fn temporary_scope(&self, rvalue_id: ast::NodeId) -> Option { + self.tcx().region_maps.temporary_scope(rvalue_id) +} - fn upvar_borrow(&self, upvar_id: ty::UpvarId) -> Option { - Some(self.tcx().upvar_borrow_map.borrow()[upvar_id].clone()) - } +fn upvar_borrow(&self, upvar_id: ty::UpvarId) -> Option { + Some(self.tcx().upvar_borrow_map.borrow()[upvar_id].clone()) +} - fn capture_mode(&self, closure_expr_id: ast::NodeId) - -> ast::CaptureClause { - self.tcx().capture_modes.borrow()[closure_expr_id].clone() - } +fn capture_mode(&self, closure_expr_id: ast::NodeId) + -> ast::CaptureClause { + self.tcx().capture_modes.borrow()[closure_expr_id].clone() +} - fn type_moves_by_default(&self, span: Span, ty: Ty<'tcx>) -> bool { - self.fcx.param_env.type_moves_by_default(span, ty) - } +fn type_moves_by_default(&self, span: Span, ty: Ty<'tcx>) -> bool { + self.fcx.param_env.type_moves_by_default(span, ty) +} } impl<'blk, 'tcx> ty::UnboxedClosureTyper<'tcx> for BlockS<'blk, 'tcx> { - fn param_env<'a>(&'a self) -> &'a ty::ParameterEnvironment<'a, 'tcx> { - &self.fcx.param_env - } +fn param_env<'a>(&'a self) -> &'a ty::ParameterEnvironment<'a, 'tcx> { + &self.fcx.param_env +} - fn unboxed_closure_kind(&self, - def_id: ast::DefId) - -> ty::UnboxedClosureKind - { - let typer = 
NormalizingUnboxedClosureTyper::new(self.tcx()); - typer.unboxed_closure_kind(def_id) - } +fn unboxed_closure_kind(&self, + def_id: ast::DefId) + -> ty::UnboxedClosureKind +{ + let typer = NormalizingUnboxedClosureTyper::new(self.tcx()); + typer.unboxed_closure_kind(def_id) +} - fn unboxed_closure_type(&self, - def_id: ast::DefId, - substs: &subst::Substs<'tcx>) - -> ty::ClosureTy<'tcx> - { - let typer = NormalizingUnboxedClosureTyper::new(self.tcx()); - typer.unboxed_closure_type(def_id, substs) - } +fn unboxed_closure_type(&self, + def_id: ast::DefId, + substs: &subst::Substs<'tcx>) + -> ty::ClosureTy<'tcx> +{ + let typer = NormalizingUnboxedClosureTyper::new(self.tcx()); + typer.unboxed_closure_type(def_id, substs) +} - fn unboxed_closure_upvars(&self, - def_id: ast::DefId, - substs: &Substs<'tcx>) - -> Option>> - { - let typer = NormalizingUnboxedClosureTyper::new(self.tcx()); - typer.unboxed_closure_upvars(def_id, substs) - } +fn unboxed_closure_upvars(&self, + def_id: ast::DefId, + substs: &Substs<'tcx>) + -> Option>> +{ + let typer = NormalizingUnboxedClosureTyper::new(self.tcx()); + typer.unboxed_closure_upvars(def_id, substs) +} } pub struct Result<'blk, 'tcx: 'blk> { - pub bcx: Block<'blk, 'tcx>, - pub val: ValueRef +pub bcx: Block<'blk, 'tcx>, +pub val: ValueRef } impl<'b, 'tcx> Result<'b, 'tcx> { - pub fn new(bcx: Block<'b, 'tcx>, val: ValueRef) -> Result<'b, 'tcx> { - Result { - bcx: bcx, - val: val, - } +pub fn new(bcx: Block<'b, 'tcx>, val: ValueRef) -> Result<'b, 'tcx> { + Result { + bcx: bcx, + val: val, } } +} pub fn val_ty(v: ValueRef) -> Type { - unsafe { - Type::from_ref(llvm::LLVMTypeOf(v)) - } +unsafe { + Type::from_ref(llvm::LLVMTypeOf(v)) +} } // LLVM constant constructors. pub fn C_null(t: Type) -> ValueRef { - unsafe { - llvm::LLVMConstNull(t.to_ref()) - } +unsafe { + llvm::LLVMConstNull(t.to_ref()) +} } pub fn C_undef(t: Type) -> ValueRef { - unsafe { - llvm::LLVMGetUndef(t.to_ref()) - } +unsafe { + llvm::LLVMGetUndef(t.to_ref()) +} } pub fn C_integral(t: Type, u: u64, sign_extend: bool) -> ValueRef { - unsafe { - llvm::LLVMConstInt(t.to_ref(), u, sign_extend as Bool) - } +unsafe { + llvm::LLVMConstInt(t.to_ref(), u, sign_extend as Bool) +} } pub fn C_floating(s: &str, t: Type) -> ValueRef { - unsafe { - let s = CString::from_slice(s.as_bytes()); - llvm::LLVMConstRealOfString(t.to_ref(), s.as_ptr()) - } +unsafe { + let s = CString::from_slice(s.as_bytes()); + llvm::LLVMConstRealOfString(t.to_ref(), s.as_ptr()) +} } pub fn C_nil(ccx: &CrateContext) -> ValueRef { - C_struct(ccx, &[], false) +C_struct(ccx, &[], false) } pub fn C_bool(ccx: &CrateContext, val: bool) -> ValueRef { - C_integral(Type::i1(ccx), val as u64, false) +C_integral(Type::i1(ccx), val as u64, false) } pub fn C_i32(ccx: &CrateContext, i: i32) -> ValueRef { - C_integral(Type::i32(ccx), i as u64, true) +C_integral(Type::i32(ccx), i as u64, true) } pub fn C_i64(ccx: &CrateContext, i: i64) -> ValueRef { - C_integral(Type::i64(ccx), i as u64, true) +C_integral(Type::i64(ccx), i as u64, true) } pub fn C_u64(ccx: &CrateContext, i: u64) -> ValueRef { - C_integral(Type::i64(ccx), i, false) +C_integral(Type::i64(ccx), i, false) } pub fn C_int(ccx: &CrateContext, i: I) -> ValueRef { - let v = i.as_i64(); +let v = i.as_i64(); - match machine::llbitsize_of_real(ccx, ccx.int_type()) { - 32 => assert!(v < (1<<31) && v >= -(1<<31)), - 64 => {}, - n => panic!("unsupported target size: {}", n) - } +match machine::llbitsize_of_real(ccx, ccx.int_type()) { + 32 => assert!(v < (1<<31) && v >= -(1<<31)), + 64 => {}, + 
n => panic!("unsupported target size: {}", n) +} - C_integral(ccx.int_type(), v as u64, true) +C_integral(ccx.int_type(), v as u64, true) } pub fn C_uint(ccx: &CrateContext, i: I) -> ValueRef { - let v = i.as_u64(); +let v = i.as_u64(); - match machine::llbitsize_of_real(ccx, ccx.int_type()) { - 32 => assert!(v < (1<<32)), - 64 => {}, - n => panic!("unsupported target size: {}", n) - } +match machine::llbitsize_of_real(ccx, ccx.int_type()) { + 32 => assert!(v < (1<<32)), + 64 => {}, + n => panic!("unsupported target size: {}", n) +} - C_integral(ccx.int_type(), v, false) +C_integral(ccx.int_type(), v, false) } pub trait AsI64 { fn as_i64(self) -> i64; } @@ -769,347 +824,347 @@ impl AsU64 for u32 { fn as_u64(self) -> u64 { self as u64 }} impl AsU64 for uint { fn as_u64(self) -> u64 { self as u64 }} pub fn C_u8(ccx: &CrateContext, i: uint) -> ValueRef { - C_integral(Type::i8(ccx), i as u64, false) +C_integral(Type::i8(ccx), i as u64, false) } // This is a 'c-like' raw string, which differs from // our boxed-and-length-annotated strings. pub fn C_cstr(cx: &CrateContext, s: InternedString, null_terminated: bool) -> ValueRef { - unsafe { - match cx.const_cstr_cache().borrow().get(&s) { - Some(&llval) => return llval, - None => () - } +unsafe { + match cx.const_cstr_cache().borrow().get(&s) { + Some(&llval) => return llval, + None => () + } - let sc = llvm::LLVMConstStringInContext(cx.llcx(), - s.get().as_ptr() as *const c_char, - s.get().len() as c_uint, - !null_terminated as Bool); + let sc = llvm::LLVMConstStringInContext(cx.llcx(), + s.get().as_ptr() as *const c_char, + s.get().len() as c_uint, + !null_terminated as Bool); - let gsym = token::gensym("str"); - let buf = CString::from_vec(format!("str{}", gsym.uint()).into_bytes()); - let g = llvm::LLVMAddGlobal(cx.llmod(), val_ty(sc).to_ref(), buf.as_ptr()); - llvm::LLVMSetInitializer(g, sc); - llvm::LLVMSetGlobalConstant(g, True); - llvm::SetLinkage(g, llvm::InternalLinkage); + let gsym = token::gensym("str"); + let buf = CString::from_vec(format!("str{}", gsym.uint()).into_bytes()); + let g = llvm::LLVMAddGlobal(cx.llmod(), val_ty(sc).to_ref(), buf.as_ptr()); + llvm::LLVMSetInitializer(g, sc); + llvm::LLVMSetGlobalConstant(g, True); + llvm::SetLinkage(g, llvm::InternalLinkage); - cx.const_cstr_cache().borrow_mut().insert(s, g); - g - } + cx.const_cstr_cache().borrow_mut().insert(s, g); + g +} } // NB: Do not use `do_spill_noroot` to make this into a constant string, or // you will be kicked off fast isel. See issue #4352 for an example of this. 
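// `C_str_slice` just below emits a string constant as a { pointer, length }
// pair, the same split an `&str` carries at run time, while `C_cstr` above
// interns the raw bytes themselves. A plain-Rust sketch of that split, using
// ordinary slices rather than LLVM constants:
fn str_slice_parts(s: &'static str) -> (*const u8, usize) {
    (s.as_ptr(), s.len())
}

fn main() {
    let (ptr, len) = str_slice_parts("hello");
    assert_eq!(len, 5);
    // Rebuilding the slice from its two halves mirrors what generated code
    // does whenever it loads a `str_slice` constant.
    let bytes = unsafe { std::slice::from_raw_parts(ptr, len) };
    assert_eq!(std::str::from_utf8(bytes).unwrap(), "hello");
}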
pub fn C_str_slice(cx: &CrateContext, s: InternedString) -> ValueRef { - let len = s.get().len(); - let cs = consts::ptrcast(C_cstr(cx, s, false), Type::i8p(cx)); - C_named_struct(cx.tn().find_type("str_slice").unwrap(), &[cs, C_uint(cx, len)]) +let len = s.get().len(); +let cs = consts::ptrcast(C_cstr(cx, s, false), Type::i8p(cx)); +C_named_struct(cx.tn().find_type("str_slice").unwrap(), &[cs, C_uint(cx, len)]) } pub fn C_binary_slice(cx: &CrateContext, data: &[u8]) -> ValueRef { - unsafe { - let len = data.len(); - let lldata = C_bytes(cx, data); - - let gsym = token::gensym("binary"); - let name = format!("binary{}", gsym.uint()); - let name = CString::from_vec(name.into_bytes()); - let g = llvm::LLVMAddGlobal(cx.llmod(), val_ty(lldata).to_ref(), - name.as_ptr()); - llvm::LLVMSetInitializer(g, lldata); - llvm::LLVMSetGlobalConstant(g, True); - llvm::SetLinkage(g, llvm::InternalLinkage); - - let cs = consts::ptrcast(g, Type::i8p(cx)); - C_struct(cx, &[cs, C_uint(cx, len)], false) - } +unsafe { + let len = data.len(); + let lldata = C_bytes(cx, data); + + let gsym = token::gensym("binary"); + let name = format!("binary{}", gsym.uint()); + let name = CString::from_vec(name.into_bytes()); + let g = llvm::LLVMAddGlobal(cx.llmod(), val_ty(lldata).to_ref(), + name.as_ptr()); + llvm::LLVMSetInitializer(g, lldata); + llvm::LLVMSetGlobalConstant(g, True); + llvm::SetLinkage(g, llvm::InternalLinkage); + + let cs = consts::ptrcast(g, Type::i8p(cx)); + C_struct(cx, &[cs, C_uint(cx, len)], false) +} } pub fn C_struct(cx: &CrateContext, elts: &[ValueRef], packed: bool) -> ValueRef { - C_struct_in_context(cx.llcx(), elts, packed) +C_struct_in_context(cx.llcx(), elts, packed) } pub fn C_struct_in_context(llcx: ContextRef, elts: &[ValueRef], packed: bool) -> ValueRef { - unsafe { - llvm::LLVMConstStructInContext(llcx, - elts.as_ptr(), elts.len() as c_uint, - packed as Bool) - } +unsafe { + llvm::LLVMConstStructInContext(llcx, + elts.as_ptr(), elts.len() as c_uint, + packed as Bool) +} } pub fn C_named_struct(t: Type, elts: &[ValueRef]) -> ValueRef { - unsafe { - llvm::LLVMConstNamedStruct(t.to_ref(), elts.as_ptr(), elts.len() as c_uint) - } +unsafe { + llvm::LLVMConstNamedStruct(t.to_ref(), elts.as_ptr(), elts.len() as c_uint) +} } pub fn C_array(ty: Type, elts: &[ValueRef]) -> ValueRef { - unsafe { - return llvm::LLVMConstArray(ty.to_ref(), elts.as_ptr(), elts.len() as c_uint); - } +unsafe { + return llvm::LLVMConstArray(ty.to_ref(), elts.as_ptr(), elts.len() as c_uint); +} } pub fn C_bytes(cx: &CrateContext, bytes: &[u8]) -> ValueRef { - C_bytes_in_context(cx.llcx(), bytes) +C_bytes_in_context(cx.llcx(), bytes) } pub fn C_bytes_in_context(llcx: ContextRef, bytes: &[u8]) -> ValueRef { - unsafe { - let ptr = bytes.as_ptr() as *const c_char; - return llvm::LLVMConstStringInContext(llcx, ptr, bytes.len() as c_uint, True); - } +unsafe { + let ptr = bytes.as_ptr() as *const c_char; + return llvm::LLVMConstStringInContext(llcx, ptr, bytes.len() as c_uint, True); +} } pub fn const_get_elt(cx: &CrateContext, v: ValueRef, us: &[c_uint]) - -> ValueRef { - unsafe { - let r = llvm::LLVMConstExtractValue(v, us.as_ptr(), us.len() as c_uint); + -> ValueRef { +unsafe { + let r = llvm::LLVMConstExtractValue(v, us.as_ptr(), us.len() as c_uint); - debug!("const_get_elt(v={}, us={}, r={})", - cx.tn().val_to_string(v), us, cx.tn().val_to_string(r)); + debug!("const_get_elt(v={}, us={:?}, r={})", + cx.tn().val_to_string(v), us, cx.tn().val_to_string(r)); - return r; - } + return r; +} } pub fn is_const(v: ValueRef) -> bool { 
- unsafe { - llvm::LLVMIsConstant(v) == True - } +unsafe { + llvm::LLVMIsConstant(v) == True +} } pub fn const_to_int(v: ValueRef) -> i64 { - unsafe { - llvm::LLVMConstIntGetSExtValue(v) - } +unsafe { + llvm::LLVMConstIntGetSExtValue(v) +} } pub fn const_to_uint(v: ValueRef) -> u64 { - unsafe { - llvm::LLVMConstIntGetZExtValue(v) - } +unsafe { + llvm::LLVMConstIntGetZExtValue(v) +} } pub fn is_undef(val: ValueRef) -> bool { - unsafe { - llvm::LLVMIsUndef(val) != False - } +unsafe { + llvm::LLVMIsUndef(val) != False +} } #[allow(dead_code)] // potentially useful pub fn is_null(val: ValueRef) -> bool { - unsafe { - llvm::LLVMIsNull(val) != False - } +unsafe { + llvm::LLVMIsNull(val) != False +} } pub fn monomorphize_type<'blk, 'tcx>(bcx: &BlockS<'blk, 'tcx>, t: Ty<'tcx>) -> Ty<'tcx> { - bcx.fcx.monomorphize(&t) +bcx.fcx.monomorphize(&t) } pub fn node_id_type<'blk, 'tcx>(bcx: &BlockS<'blk, 'tcx>, id: ast::NodeId) -> Ty<'tcx> { - let tcx = bcx.tcx(); - let t = ty::node_id_to_type(tcx, id); - monomorphize_type(bcx, t) +let tcx = bcx.tcx(); +let t = ty::node_id_to_type(tcx, id); +monomorphize_type(bcx, t) } pub fn expr_ty<'blk, 'tcx>(bcx: &BlockS<'blk, 'tcx>, ex: &ast::Expr) -> Ty<'tcx> { - node_id_type(bcx, ex.id) +node_id_type(bcx, ex.id) } pub fn expr_ty_adjusted<'blk, 'tcx>(bcx: &BlockS<'blk, 'tcx>, ex: &ast::Expr) -> Ty<'tcx> { - monomorphize_type(bcx, ty::expr_ty_adjusted(bcx.tcx(), ex)) +monomorphize_type(bcx, ty::expr_ty_adjusted(bcx.tcx(), ex)) } /// Attempts to resolve an obligation. The result is a shallow vtable resolution -- meaning that we /// do not (necessarily) resolve all nested obligations on the impl. Note that type check should /// guarantee to us that all nested obligations *could be* resolved if we wanted to. pub fn fulfill_obligation<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, - span: Span, - trait_ref: ty::PolyTraitRef<'tcx>) - -> traits::Vtable<'tcx, ()> + span: Span, + trait_ref: ty::PolyTraitRef<'tcx>) + -> traits::Vtable<'tcx, ()> { - let tcx = ccx.tcx(); - - // Remove any references to regions; this helps improve caching. - let trait_ref = ty_fold::erase_regions(tcx, trait_ref); - - // First check the cache. - match ccx.trait_cache().borrow().get(&trait_ref) { - Some(vtable) => { - info!("Cache hit: {}", trait_ref.repr(ccx.tcx())); - return (*vtable).clone(); - } - None => { } - } - - debug!("trans fulfill_obligation: trait_ref={}", trait_ref.repr(ccx.tcx())); - - ty::populate_implementations_for_trait_if_necessary(tcx, trait_ref.def_id()); - let infcx = infer::new_infer_ctxt(tcx); - - // Do the initial selection for the obligation. This yields the - // shallow result we are looking for -- that is, what specific impl. - let typer = NormalizingUnboxedClosureTyper::new(tcx); - let mut selcx = traits::SelectionContext::new(&infcx, &typer); - let obligation = traits::Obligation::new(traits::ObligationCause::dummy(), - trait_ref.to_poly_trait_predicate()); - let selection = match selcx.select(&obligation) { - Ok(Some(selection)) => selection, - Ok(None) => { - // Ambiguity can happen when monomorphizing during trans - // expands to some humongo type that never occurred - // statically -- this humongo type can then overflow, - // leading to an ambiguous result. So report this as an - // overflow bug, since I believe this is the only case - // where ambiguity can result. 
- debug!("Encountered ambiguity selecting `{}` during trans, \ - presuming due to overflow", - trait_ref.repr(tcx)); - ccx.sess().span_fatal( - span, - "reached the recursion limit during monomorphization"); - } - Err(e) => { - tcx.sess.span_bug( - span, - format!("Encountered error `{}` selecting `{}` during trans", - e.repr(tcx), - trait_ref.repr(tcx))[]) - } - }; - - // Currently, we use a fulfillment context to completely resolve - // all nested obligations. This is because they can inform the - // inference of the impl's type parameters. - let mut fulfill_cx = traits::FulfillmentContext::new(); - let vtable = selection.map_move_nested(|predicate| { - fulfill_cx.register_predicate_obligation(&infcx, predicate); - }); - let vtable = drain_fulfillment_cx(span, &infcx, &mut fulfill_cx, &vtable); - - info!("Cache miss: {}", trait_ref.repr(ccx.tcx())); - ccx.trait_cache().borrow_mut().insert(trait_ref, - vtable.clone()); - - vtable +let tcx = ccx.tcx(); + +// Remove any references to regions; this helps improve caching. +let trait_ref = ty_fold::erase_regions(tcx, trait_ref); + +// First check the cache. +match ccx.trait_cache().borrow().get(&trait_ref) { + Some(vtable) => { + info!("Cache hit: {}", trait_ref.repr(ccx.tcx())); + return (*vtable).clone(); + } + None => { } +} + +debug!("trans fulfill_obligation: trait_ref={}", trait_ref.repr(ccx.tcx())); + +ty::populate_implementations_for_trait_if_necessary(tcx, trait_ref.def_id()); +let infcx = infer::new_infer_ctxt(tcx); + +// Do the initial selection for the obligation. This yields the +// shallow result we are looking for -- that is, what specific impl. +let typer = NormalizingUnboxedClosureTyper::new(tcx); +let mut selcx = traits::SelectionContext::new(&infcx, &typer); +let obligation = traits::Obligation::new(traits::ObligationCause::dummy(), + trait_ref.to_poly_trait_predicate()); +let selection = match selcx.select(&obligation) { + Ok(Some(selection)) => selection, + Ok(None) => { + // Ambiguity can happen when monomorphizing during trans + // expands to some humongo type that never occurred + // statically -- this humongo type can then overflow, + // leading to an ambiguous result. So report this as an + // overflow bug, since I believe this is the only case + // where ambiguity can result. + debug!("Encountered ambiguity selecting `{}` during trans, \ + presuming due to overflow", + trait_ref.repr(tcx)); + ccx.sess().span_fatal( + span, + "reached the recursion limit during monomorphization"); + } + Err(e) => { + tcx.sess.span_bug( + span, + format!("Encountered error `{}` selecting `{}` during trans", + e.repr(tcx), + trait_ref.repr(tcx)).index(&FullRange)) + } +}; + +// Currently, we use a fulfillment context to completely resolve +// all nested obligations. This is because they can inform the +// inference of the impl's type parameters. 
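// A standalone sketch of the check-cache / select / insert-on-miss shape that
// `fulfill_obligation` follows, keyed by the erased trait reference. A HashMap
// of strings stands in for `ccx.trait_cache()` and a `u64` for the selected
// vtable; the "selection" itself is faked.
use std::cell::RefCell;
use std::collections::HashMap;

fn fulfill(cache: &RefCell<HashMap<String, u64>>, trait_ref: &str) -> u64 {
    let cached = cache.borrow().get(trait_ref).copied();
    if let Some(vtable) = cached {
        return vtable; // cache hit
    }
    // Cache miss: run the (expensive) selection, then remember the result.
    let vtable = trait_ref.len() as u64;
    cache.borrow_mut().insert(trait_ref.to_string(), vtable);
    vtable
}

fn main() {
    let cache = RefCell::new(HashMap::new());
    assert_eq!(fulfill(&cache, "Clone for Foo"), fulfill(&cache, "Clone for Foo"));
}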
+let mut fulfill_cx = traits::FulfillmentContext::new(); +let vtable = selection.map_move_nested(|predicate| { + fulfill_cx.register_predicate_obligation(&infcx, predicate); +}); +let vtable = drain_fulfillment_cx(span, &infcx, &mut fulfill_cx, &vtable); + +info!("Cache miss: {}", trait_ref.repr(ccx.tcx())); +ccx.trait_cache().borrow_mut().insert(trait_ref, + vtable.clone()); + +vtable } pub struct NormalizingUnboxedClosureTyper<'a,'tcx:'a> { - param_env: ty::ParameterEnvironment<'a, 'tcx> +param_env: ty::ParameterEnvironment<'a, 'tcx> } impl<'a,'tcx> NormalizingUnboxedClosureTyper<'a,'tcx> { - pub fn new(tcx: &'a ty::ctxt<'tcx>) -> NormalizingUnboxedClosureTyper<'a,'tcx> { - // Parameter environment is used to give details about type parameters, - // but since we are in trans, everything is fully monomorphized. - NormalizingUnboxedClosureTyper { param_env: ty::empty_parameter_environment(tcx) } - } +pub fn new(tcx: &'a ty::ctxt<'tcx>) -> NormalizingUnboxedClosureTyper<'a,'tcx> { + // Parameter environment is used to give details about type parameters, + // but since we are in trans, everything is fully monomorphized. + NormalizingUnboxedClosureTyper { param_env: ty::empty_parameter_environment(tcx) } +} } impl<'a,'tcx> ty::UnboxedClosureTyper<'tcx> for NormalizingUnboxedClosureTyper<'a,'tcx> { - fn param_env<'b>(&'b self) -> &'b ty::ParameterEnvironment<'b,'tcx> { - &self.param_env - } +fn param_env<'b>(&'b self) -> &'b ty::ParameterEnvironment<'b,'tcx> { + &self.param_env +} - fn unboxed_closure_kind(&self, - def_id: ast::DefId) - -> ty::UnboxedClosureKind - { - self.param_env.tcx.unboxed_closure_kind(def_id) - } +fn unboxed_closure_kind(&self, + def_id: ast::DefId) + -> ty::UnboxedClosureKind +{ + self.param_env.tcx.unboxed_closure_kind(def_id) +} - fn unboxed_closure_type(&self, - def_id: ast::DefId, - substs: &subst::Substs<'tcx>) - -> ty::ClosureTy<'tcx> - { - // the substitutions in `substs` are already monomorphized, - // but we still must normalize associated types - let closure_ty = self.param_env.tcx.unboxed_closure_type(def_id, substs); - monomorphize::normalize_associated_type(self.param_env.tcx, &closure_ty) - } +fn unboxed_closure_type(&self, + def_id: ast::DefId, + substs: &subst::Substs<'tcx>) + -> ty::ClosureTy<'tcx> +{ + // the substitutions in `substs` are already monomorphized, + // but we still must normalize associated types + let closure_ty = self.param_env.tcx.unboxed_closure_type(def_id, substs); + monomorphize::normalize_associated_type(self.param_env.tcx, &closure_ty) +} - fn unboxed_closure_upvars(&self, - def_id: ast::DefId, - substs: &Substs<'tcx>) - -> Option>> - { - // the substitutions in `substs` are already monomorphized, - // but we still must normalize associated types - let result = ty::unboxed_closure_upvars(&self.param_env, def_id, substs); - monomorphize::normalize_associated_type(self.param_env.tcx, &result) - } +fn unboxed_closure_upvars(&self, + def_id: ast::DefId, + substs: &Substs<'tcx>) + -> Option>> +{ + // the substitutions in `substs` are already monomorphized, + // but we still must normalize associated types + let result = ty::unboxed_closure_upvars(&self.param_env, def_id, substs); + monomorphize::normalize_associated_type(self.param_env.tcx, &result) +} } pub fn drain_fulfillment_cx<'a,'tcx,T>(span: Span, - infcx: &infer::InferCtxt<'a,'tcx>, - fulfill_cx: &mut traits::FulfillmentContext<'tcx>, - result: &T) - -> T - where T : TypeFoldable<'tcx> + Repr<'tcx> + infcx: &infer::InferCtxt<'a,'tcx>, + fulfill_cx: &mut 
traits::FulfillmentContext<'tcx>, + result: &T) + -> T +where T : TypeFoldable<'tcx> + Repr<'tcx> { - debug!("drain_fulfillment_cx(result={})", - result.repr(infcx.tcx)); - - // In principle, we only need to do this so long as `result` - // contains unbound type parameters. It could be a slight - // optimization to stop iterating early. - let typer = NormalizingUnboxedClosureTyper::new(infcx.tcx); - match fulfill_cx.select_all_or_error(infcx, &typer) { - Ok(()) => { } - Err(errors) => { - if errors.iter().all(|e| e.is_overflow()) { - // See Ok(None) case above. - infcx.tcx.sess.span_fatal( - span, - "reached the recursion limit during monomorphization"); - } else { - infcx.tcx.sess.span_bug( - span, - format!("Encountered errors `{}` fulfilling during trans", - errors.repr(infcx.tcx))[]); - } +debug!("drain_fulfillment_cx(result={})", + result.repr(infcx.tcx)); + +// In principle, we only need to do this so long as `result` +// contains unbound type parameters. It could be a slight +// optimization to stop iterating early. +let typer = NormalizingUnboxedClosureTyper::new(infcx.tcx); +match fulfill_cx.select_all_or_error(infcx, &typer) { + Ok(()) => { } + Err(errors) => { + if errors.iter().all(|e| e.is_overflow()) { + // See Ok(None) case above. + infcx.tcx.sess.span_fatal( + span, + "reached the recursion limit during monomorphization"); + } else { + infcx.tcx.sess.span_bug( + span, + format!("Encountered errors `{}` fulfilling during trans", + errors.repr(infcx.tcx)).index(&FullRange)); } } +} - // Use freshen to simultaneously replace all type variables with - // their bindings and replace all regions with 'static. This is - // sort of overkill because we do not expect there to be any - // unbound type variables, hence no `TyFresh` types should ever be - // inserted. - result.fold_with(&mut infcx.freshener()) +// Use freshen to simultaneously replace all type variables with +// their bindings and replace all regions with 'static. This is +// sort of overkill because we do not expect there to be any +// unbound type variables, hence no `TyFresh` types should ever be +// inserted. +result.fold_with(&mut infcx.freshener()) } // Key used to lookup values supplied for type parameters in an expr. 
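// A rough standalone sketch of what `node_id_substs` below does: look the
// substitutions up by either an expression id or a method-call key, then
// apply the enclosing function's `param_substs` to them. Toy types only;
// `String` stands in for a type and HashMaps for the side tables.
use std::collections::HashMap;

#[derive(Copy, Clone, PartialEq, Eq, Hash)]
enum Key {
    ExprId(u32),
    MethodCallKey(u32),
}

fn node_substs(
    table: &HashMap<Key, Vec<String>>,
    key: Key,
    param_substs: &HashMap<String, String>,
) -> Vec<String> {
    table[&key]
        .iter()
        // Replace any remaining type parameters (e.g. "T") with concrete types.
        .map(|t| param_substs.get(t).cloned().unwrap_or_else(|| t.clone()))
        .collect()
}

fn main() {
    let mut table = HashMap::new();
    table.insert(Key::ExprId(7), vec!["T".to_string()]);
    table.insert(Key::MethodCallKey(9), vec!["uint".to_string()]);
    let mut params = HashMap::new();
    params.insert("T".to_string(), "i32".to_string());
    assert_eq!(node_substs(&table, Key::ExprId(7), &params), vec!["i32".to_string()]);
    assert_eq!(node_substs(&table, Key::MethodCallKey(9), &params), vec!["uint".to_string()]);
}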
#[derive(Copy, PartialEq, Show)] pub enum ExprOrMethodCall { - // Type parameters for a path like `None::` - ExprId(ast::NodeId), +// Type parameters for a path like `None::` +ExprId(ast::NodeId), - // Type parameters for a method call like `a.foo::()` - MethodCallKey(ty::MethodCall) +// Type parameters for a method call like `a.foo::()` +MethodCallKey(ty::MethodCall) } pub fn node_id_substs<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, - node: ExprOrMethodCall, - param_substs: &subst::Substs<'tcx>) - -> subst::Substs<'tcx> { - let tcx = ccx.tcx(); - - let substs = match node { - ExprId(id) => { - ty::node_id_item_substs(tcx, id).substs - } - MethodCallKey(method_call) => { - (*tcx.method_map.borrow())[method_call].substs.clone() - } - }; + node: ExprOrMethodCall, + param_substs: &subst::Substs<'tcx>) + -> subst::Substs<'tcx> { +let tcx = ccx.tcx(); + +let substs = match node { + ExprId(id) => { + ty::node_id_item_substs(tcx, id).substs + } + MethodCallKey(method_call) => { + (*tcx.method_map.borrow())[method_call].substs.clone() + } +}; - if substs.types.any(|t| ty::type_needs_infer(*t)) { - tcx.sess.bug(format!("type parameters for node {} include inference types: {}", - node, substs.repr(tcx))[]); +if substs.types.any(|t| ty::type_needs_infer(*t)) { + tcx.sess.bug(format!("type parameters for node {:?} include inference types: {:?}", + node, substs.repr(tcx)).index(&FullRange)); } monomorphize::apply_param_substs(tcx, @@ -1127,8 +1182,8 @@ pub fn langcall(bcx: Block, Err(s) => { let msg = format!("{} {}", msg, s); match span { - Some(span) => bcx.tcx().sess.span_fatal(span, msg[]), - None => bcx.tcx().sess.fatal(msg[]), + Some(span) => bcx.tcx().sess.span_fatal(span, msg.index(&FullRange)), + None => bcx.tcx().sess.fatal(msg.index(&FullRange)), } } } diff --git a/src/librustc_trans/trans/consts.rs b/src/librustc_trans/trans/consts.rs index a3861e71d83de..b0474d7e011a5 100644 --- a/src/librustc_trans/trans/consts.rs +++ b/src/librustc_trans/trans/consts.rs @@ -32,7 +32,7 @@ use syntax::ptr::P; pub fn const_lit(cx: &CrateContext, e: &ast::Expr, lit: &ast::Lit) -> ValueRef { let _icx = push_ctxt("trans_lit"); - debug!("const_lit: {}", lit); + debug!("const_lit: {:?}", lit); match lit.node { ast::LitByte(b) => C_integral(Type::uint_from_ty(cx, ast::TyU8), b as u64, false), ast::LitChar(i) => C_integral(Type::char(cx), i as u64, false), @@ -54,7 +54,7 @@ pub fn const_lit(cx: &CrateContext, e: &ast::Expr, lit: &ast::Lit) _ => cx.sess().span_bug(lit.span, format!("integer literal has type {} (expected int \ or uint)", - ty_to_string(cx.tcx(), lit_int_ty))[]) + ty_to_string(cx.tcx(), lit_int_ty)).index(&FullRange)) } } ast::LitFloat(ref fs, t) => { @@ -74,7 +74,7 @@ pub fn const_lit(cx: &CrateContext, e: &ast::Expr, lit: &ast::Lit) } ast::LitBool(b) => C_bool(cx, b), ast::LitStr(ref s, _) => C_str_slice(cx, (*s).clone()), - ast::LitBinary(ref data) => C_binary_slice(cx, data[]), + ast::LitBinary(ref data) => C_binary_slice(cx, data.index(&FullRange)), } } @@ -93,9 +93,9 @@ fn const_vec(cx: &CrateContext, e: &ast::Expr, .collect::>(); // If the vector contains enums, an LLVM array won't work. 
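// The branch just below falls back to an anonymous struct constant whenever
// the element constants do not all share one LLVM type (as can happen with
// enum variants), and keeps a true array otherwise. A standalone sketch of
// that decision, with strings standing in for LLVM types:
#[derive(Debug, PartialEq)]
enum ConstRepr {
    Array,  // homogeneous elements: C_array
    Struct, // mixed element types: C_struct
}

fn vec_repr(elem_types: &[&str], unit_type: &str) -> ConstRepr {
    if elem_types.iter().any(|t| *t != unit_type) {
        ConstRepr::Struct
    } else {
        ConstRepr::Array
    }
}

fn main() {
    assert_eq!(vec_repr(&["i32", "i32"], "i32"), ConstRepr::Array);
    assert_eq!(vec_repr(&["{ i8, i32 }", "{ i8 }"], "{ i8, i32 }"), ConstRepr::Struct);
}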
let v = if vs.iter().any(|vi| val_ty(*vi) != llunitty) { - C_struct(cx, vs[], false) + C_struct(cx, vs.index(&FullRange), false) } else { - C_array(llunitty, vs[]) + C_array(llunitty, vs.index(&FullRange)) }; (v, llunitty) } @@ -149,13 +149,13 @@ fn const_deref<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, v: ValueRef, } _ => { cx.sess().bug(format!("unexpected dereferenceable type {}", - ty_to_string(cx.tcx(), t))[]) + ty_to_string(cx.tcx(), t)).index(&FullRange)) } } } None => { cx.sess().bug(format!("cannot dereference const of type {}", - ty_to_string(cx.tcx(), t))[]) + ty_to_string(cx.tcx(), t)).index(&FullRange)) } } } @@ -187,7 +187,7 @@ pub fn const_expr<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, e: &ast::Expr) None => { } Some(adj) => { match adj { - ty::AdjustReifyFnPointer(_def_id) => { + ty::AdjustReifyFnPointer(_def_id) => { // FIXME(#19925) once fn item types are // zero-sized, we'll need to do something here } @@ -252,15 +252,15 @@ pub fn const_expr<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, e: &ast::Expr) } _ => cx.sess().span_bug(e.span, format!("unimplemented type in const unsize: {}", - ty_to_string(cx.tcx(), ty))[]) + ty_to_string(cx.tcx(), ty)).index(&FullRange)) } } _ => { cx.sess() .span_bug(e.span, format!("unimplemented const \ - autoref {}", - autoref)[]) + autoref {:?}", + autoref).index(&FullRange)) } } } @@ -281,7 +281,7 @@ pub fn const_expr<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, e: &ast::Expr) } cx.sess().bug(format!("const {} of type {} has size {} instead of {}", e.repr(cx.tcx()), ty_to_string(cx.tcx(), ety), - csize, tsize)[]); + csize, tsize).index(&FullRange)); } (llconst, ety_adjusted) } @@ -431,7 +431,7 @@ fn const_expr_unadjusted(cx: &CrateContext, e: &ast::Expr) -> ValueRef { _ => cx.sess().span_bug(base.span, format!("index-expr base must be a vector \ or string type, found {}", - ty_to_string(cx.tcx(), bt))[]) + ty_to_string(cx.tcx(), bt)).index(&FullRange)) }, ty::ty_rptr(_, mt) => match mt.ty.sty { ty::ty_vec(_, Some(u)) => { @@ -440,12 +440,12 @@ fn const_expr_unadjusted(cx: &CrateContext, e: &ast::Expr) -> ValueRef { _ => cx.sess().span_bug(base.span, format!("index-expr base must be a vector \ or string type, found {}", - ty_to_string(cx.tcx(), bt))[]) + ty_to_string(cx.tcx(), bt)).index(&FullRange)) }, _ => cx.sess().span_bug(base.span, format!("index-expr base must be a vector \ or string type, found {}", - ty_to_string(cx.tcx(), bt))[]) + ty_to_string(cx.tcx(), bt)).index(&FullRange)) }; let len = llvm::LLVMConstIntGetZExtValue(len) as u64; @@ -546,8 +546,8 @@ fn const_expr_unadjusted(cx: &CrateContext, e: &ast::Expr) -> ValueRef { ast::ExprTup(ref es) => { let ety = ty::expr_ty(cx.tcx(), e); let repr = adt::represent_type(cx, ety); - let vals = map_list(es[]); - adt::trans_const(cx, &*repr, 0, vals[]) + let vals = map_list(es.index(&FullRange)); + adt::trans_const(cx, &*repr, 0, vals.index(&FullRange)) } ast::ExprStruct(_, ref fs, ref base_opt) => { let ety = ty::expr_ty(cx.tcx(), e); @@ -578,7 +578,7 @@ fn const_expr_unadjusted(cx: &CrateContext, e: &ast::Expr) -> ValueRef { } } }).collect::>(); - adt::trans_const(cx, &*repr, discr, cs[]) + adt::trans_const(cx, &*repr, discr, cs.index(&FullRange)) }) } ast::ExprVec(ref es) => { @@ -595,9 +595,9 @@ fn const_expr_unadjusted(cx: &CrateContext, e: &ast::Expr) -> ValueRef { }; let vs: Vec<_> = repeat(const_expr(cx, &**elem).0).take(n).collect(); if vs.iter().any(|vi| val_ty(*vi) != llunitty) { - C_struct(cx, vs[], false) + C_struct(cx, vs.index(&FullRange), false) } else { - C_array(llunitty, vs[]) + 
C_array(llunitty, vs.index(&FullRange)) } } ast::ExprPath(_) => { @@ -645,8 +645,8 @@ fn const_expr_unadjusted(cx: &CrateContext, e: &ast::Expr) -> ValueRef { Some(def::DefStruct(_)) => { let ety = ty::expr_ty(cx.tcx(), e); let repr = adt::represent_type(cx, ety); - let arg_vals = map_list(args[]); - adt::trans_const(cx, &*repr, 0, arg_vals[]) + let arg_vals = map_list(args.index(&FullRange)); + adt::trans_const(cx, &*repr, 0, arg_vals.index(&FullRange)) } Some(def::DefVariant(enum_did, variant_did, _)) => { let ety = ty::expr_ty(cx.tcx(), e); @@ -654,11 +654,11 @@ fn const_expr_unadjusted(cx: &CrateContext, e: &ast::Expr) -> ValueRef { let vinfo = ty::enum_variant_with_id(cx.tcx(), enum_did, variant_did); - let arg_vals = map_list(args[]); + let arg_vals = map_list(args.index(&FullRange)); adt::trans_const(cx, &*repr, vinfo.disr_val, - arg_vals[]) + arg_vals.index(&FullRange)) } _ => cx.sess().span_bug(e.span, "expected a struct or variant def") } diff --git a/src/librustc_trans/trans/context.rs b/src/librustc_trans/trans/context.rs index bced3be01f0d6..35fb34eafb4d0 100644 --- a/src/librustc_trans/trans/context.rs +++ b/src/librustc_trans/trans/context.rs @@ -284,7 +284,7 @@ impl<'tcx> SharedCrateContext<'tcx> { // such as a function name in the module. // 1. http://llvm.org/bugs/show_bug.cgi?id=11479 let llmod_id = format!("{}.{}.rs", crate_name, i); - let local_ccx = LocalCrateContext::new(&shared_ccx, llmod_id[]); + let local_ccx = LocalCrateContext::new(&shared_ccx, llmod_id.index(&FullRange)); shared_ccx.local_ccxs.push(local_ccx); } @@ -374,7 +374,7 @@ impl<'tcx> LocalCrateContext<'tcx> { .target .target .data_layout - []); + .index(&FullRange)); let dbg_cx = if shared.tcx.sess.opts.debuginfo != NoDebugInfo { Some(debuginfo::CrateDebugContext::new(llmod)) @@ -721,7 +721,7 @@ impl<'b, 'tcx> CrateContext<'b, 'tcx> { /// currently conservatively bounded to 1 << 47 as that is enough to cover the current usable /// address space on 64-bit ARMv8 and x86_64. pub fn obj_size_bound(&self) -> u64 { - match self.sess().target.target.target_word_size[] { + match self.sess().target.target.target_word_size.index(&FullRange) { "32" => 1 << 31, "64" => 1 << 47, _ => unreachable!() // error handled by config::build_target_config @@ -731,7 +731,7 @@ impl<'b, 'tcx> CrateContext<'b, 'tcx> { pub fn report_overbig_object(&self, obj: Ty<'tcx>) -> ! 
{ self.sess().fatal( format!("the type `{}` is too big for the current architecture", - obj.repr(self.tcx()))[]) + obj.repr(self.tcx())).index(&FullRange)) } } diff --git a/src/librustc_trans/trans/controlflow.rs b/src/librustc_trans/trans/controlflow.rs index 768de89d5935d..38d40a8322f90 100644 --- a/src/librustc_trans/trans/controlflow.rs +++ b/src/librustc_trans/trans/controlflow.rs @@ -48,7 +48,7 @@ pub fn trans_stmt<'blk, 'tcx>(cx: Block<'blk, 'tcx>, debug!("trans_stmt({})", s.repr(cx.tcx())); if cx.sess().asm_comments() { - add_span_comment(cx, s.span, s.repr(cx.tcx())[]); + add_span_comment(cx, s.span, s.repr(cx.tcx()).index(&FullRange)); } let mut bcx = cx; @@ -188,7 +188,7 @@ pub fn trans_if<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, } let name = format!("then-block-{}-", thn.id); - let then_bcx_in = bcx.fcx.new_id_block(name[], thn.id); + let then_bcx_in = bcx.fcx.new_id_block(name.index(&FullRange), thn.id); let then_bcx_out = trans_block(then_bcx_in, &*thn, dest); trans::debuginfo::clear_source_location(bcx.fcx); @@ -265,7 +265,8 @@ pub fn trans_for<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, pat: &ast::Pat, head: &ast::Expr, body: &ast::Block) - -> Block<'blk, 'tcx> { + -> Block<'blk, 'tcx> +{ let _icx = push_ctxt("trans_for"); // bcx @@ -306,7 +307,9 @@ pub fn trans_for<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, .borrow())[method_call] .ty; let method_type = monomorphize_type(loopback_bcx_in, method_type); - let method_result_type = ty::ty_fn_ret(method_type).unwrap(); + let method_result_type = + ty::assert_no_late_bound_regions( // LB regions are instantiated in invoked methods + loopback_bcx_in.tcx(), &ty::ty_fn_ret(method_type)).unwrap(); let option_cleanup_scope = body_bcx_in.fcx.push_custom_cleanup_scope(); let option_cleanup_scope_id = cleanup::CustomScope(option_cleanup_scope); @@ -436,8 +439,8 @@ pub fn trans_break_cont<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, match bcx.tcx().def_map.borrow().get(&expr_id) { Some(&def::DefLabel(loop_id)) => loop_id, ref r => { - bcx.tcx().sess.bug(format!("{} in def-map for label", - r)[]) + bcx.tcx().sess.bug(format!("{:?} in def-map for label", + r).index(&FullRange)) } } } @@ -501,7 +504,7 @@ pub fn trans_fail<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let v_str = C_str_slice(ccx, fail_str); let loc = bcx.sess().codemap().lookup_char_pos(sp.lo); - let filename = token::intern_and_get_ident(loc.file.name[]); + let filename = token::intern_and_get_ident(loc.file.name.index(&FullRange)); let filename = C_str_slice(ccx, filename); let line = C_uint(ccx, loc.line); let expr_file_line_const = C_struct(ccx, &[v_str, filename, line], false); @@ -510,7 +513,7 @@ pub fn trans_fail<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let did = langcall(bcx, Some(sp), "", PanicFnLangItem); let bcx = callee::trans_lang_call(bcx, did, - args[], + args.index(&FullRange), Some(expr::Ignore)).bcx; Unreachable(bcx); return bcx; @@ -526,7 +529,7 @@ pub fn trans_fail_bounds_check<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, // Extract the file/line from the span let loc = bcx.sess().codemap().lookup_char_pos(sp.lo); - let filename = token::intern_and_get_ident(loc.file.name[]); + let filename = token::intern_and_get_ident(loc.file.name.index(&FullRange)); // Invoke the lang item let filename = C_str_slice(ccx, filename); @@ -537,7 +540,7 @@ pub fn trans_fail_bounds_check<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let did = langcall(bcx, Some(sp), "", PanicBoundsCheckFnLangItem); let bcx = callee::trans_lang_call(bcx, did, - args[], + args.index(&FullRange), Some(expr::Ignore)).bcx; Unreachable(bcx); return 
bcx; diff --git a/src/librustc_trans/trans/datum.rs b/src/librustc_trans/trans/datum.rs index d73b3f6b4e420..26518d4092fa3 100644 --- a/src/librustc_trans/trans/datum.rs +++ b/src/librustc_trans/trans/datum.rs @@ -464,7 +464,7 @@ impl<'tcx> Datum<'tcx, Lvalue> { } _ => bcx.tcx().sess.bug( format!("Unexpected unsized type in get_element: {}", - bcx.ty_to_string(self.ty))[]) + bcx.ty_to_string(self.ty)).index(&FullRange)) }; Datum { val: val, @@ -552,7 +552,7 @@ impl<'tcx, K: KindOps + fmt::Show> Datum<'tcx, K> { #[allow(dead_code)] // useful for debugging pub fn to_string<'a>(&self, ccx: &CrateContext<'a, 'tcx>) -> String { - format!("Datum({}, {}, {})", + format!("Datum({}, {}, {:?})", ccx.tn().val_to_string(self.val), ty_to_string(ccx.tcx(), self.ty), self.kind) diff --git a/src/librustc_trans/trans/debuginfo.rs b/src/librustc_trans/trans/debuginfo.rs index 3f0f7fd9bd303..e2e1b3a799bfc 100644 --- a/src/librustc_trans/trans/debuginfo.rs +++ b/src/librustc_trans/trans/debuginfo.rs @@ -285,7 +285,7 @@ impl<'tcx> TypeMap<'tcx> { metadata: DIType) { if self.type_to_metadata.insert(type_, metadata).is_some() { cx.sess().bug(format!("Type metadata for Ty '{}' is already in the TypeMap!", - ppaux::ty_to_string(cx.tcx(), type_))[]); + ppaux::ty_to_string(cx.tcx(), type_)).index(&FullRange)); } } @@ -298,7 +298,7 @@ impl<'tcx> TypeMap<'tcx> { if self.unique_id_to_metadata.insert(unique_type_id, metadata).is_some() { let unique_type_id_str = self.get_unique_type_id_as_string(unique_type_id); cx.sess().bug(format!("Type metadata for unique id '{}' is already in the TypeMap!", - unique_type_id_str[])[]); + unique_type_id_str.index(&FullRange)).index(&FullRange)); } } @@ -335,13 +335,13 @@ impl<'tcx> TypeMap<'tcx> { // unique ptr (~) -> {~ :pointee-uid:} // @-ptr (@) -> {@ :pointee-uid:} // sized vec ([T; x]) -> {[:size:] :element-uid:} - // unsized vec ([T]) -> {[] :element-uid:} + // unsized vec ([T]) -> {.index(&FullRange) :element-uid:} // trait (T) -> {trait_:svh: / :node-id:_<(:param-uid:),*> } // closure -> { :store-sigil: |(:param-uid:),* <,_...>| -> \ // :return-type-uid: : (:bounds:)*} // function -> { fn( (:param-uid:)* <,_...> ) -> \ // :return-type-uid:} - // unique vec box (~[]) -> {HEAP_VEC_BOX<:pointee-uid:>} + // unique vec box (~.index(&FullRange)) -> {HEAP_VEC_BOX<:pointee-uid:>} // gc box -> {GC_BOX<:pointee-uid:>} match self.type_to_unique_id.get(&type_).cloned() { @@ -379,14 +379,14 @@ impl<'tcx> TypeMap<'tcx> { self.get_unique_type_id_of_type(cx, component_type); let component_type_id = self.get_unique_type_id_as_string(component_type_id); - unique_type_id.push_str(component_type_id[]); + unique_type_id.push_str(component_type_id.index(&FullRange)); } }, ty::ty_uniq(inner_type) => { unique_type_id.push('~'); let inner_type_id = self.get_unique_type_id_of_type(cx, inner_type); let inner_type_id = self.get_unique_type_id_as_string(inner_type_id); - unique_type_id.push_str(inner_type_id[]); + unique_type_id.push_str(inner_type_id.index(&FullRange)); }, ty::ty_ptr(ty::mt { ty: inner_type, mutbl } ) => { unique_type_id.push('*'); @@ -396,7 +396,7 @@ impl<'tcx> TypeMap<'tcx> { let inner_type_id = self.get_unique_type_id_of_type(cx, inner_type); let inner_type_id = self.get_unique_type_id_as_string(inner_type_id); - unique_type_id.push_str(inner_type_id[]); + unique_type_id.push_str(inner_type_id.index(&FullRange)); }, ty::ty_rptr(_, ty::mt { ty: inner_type, mutbl }) => { unique_type_id.push('&'); @@ -406,12 +406,12 @@ impl<'tcx> TypeMap<'tcx> { let inner_type_id = 
self.get_unique_type_id_of_type(cx, inner_type); let inner_type_id = self.get_unique_type_id_as_string(inner_type_id); - unique_type_id.push_str(inner_type_id[]); + unique_type_id.push_str(inner_type_id.index(&FullRange)); }, ty::ty_vec(inner_type, optional_length) => { match optional_length { Some(len) => { - unique_type_id.push_str(format!("[{}]", len)[]); + unique_type_id.push_str(format!("[{}]", len).index(&FullRange)); } None => { unique_type_id.push_str("[]"); @@ -420,15 +420,19 @@ impl<'tcx> TypeMap<'tcx> { let inner_type_id = self.get_unique_type_id_of_type(cx, inner_type); let inner_type_id = self.get_unique_type_id_as_string(inner_type_id); - unique_type_id.push_str(inner_type_id[]); + unique_type_id.push_str(inner_type_id.index(&FullRange)); }, ty::ty_trait(ref trait_data) => { unique_type_id.push_str("trait "); + let principal = + ty::erase_late_bound_regions(cx.tcx(), + &trait_data.principal); + from_def_id_and_substs(self, cx, - trait_data.principal_def_id(), - trait_data.principal.0.substs, + principal.def_id, + principal.substs, &mut unique_type_id); }, ty::ty_bare_fn(_, &ty::BareFnTy{ unsafety, abi, ref sig } ) => { @@ -440,25 +444,27 @@ impl<'tcx> TypeMap<'tcx> { unique_type_id.push_str(" fn("); - for ¶meter_type in sig.0.inputs.iter() { + let sig = ty::erase_late_bound_regions(cx.tcx(), sig); + + for ¶meter_type in sig.inputs.iter() { let parameter_type_id = self.get_unique_type_id_of_type(cx, parameter_type); let parameter_type_id = self.get_unique_type_id_as_string(parameter_type_id); - unique_type_id.push_str(parameter_type_id[]); + unique_type_id.push_str(parameter_type_id.index(&FullRange)); unique_type_id.push(','); } - if sig.0.variadic { + if sig.variadic { unique_type_id.push_str("..."); } unique_type_id.push_str(")->"); - match sig.0.output { + match sig.output { ty::FnConverging(ret_ty) => { let return_type_id = self.get_unique_type_id_of_type(cx, ret_ty); let return_type_id = self.get_unique_type_id_as_string(return_type_id); - unique_type_id.push_str(return_type_id[]); + unique_type_id.push_str(return_type_id.index(&FullRange)); } ty::FnDiverging => { unique_type_id.push_str("!"); @@ -473,9 +479,9 @@ impl<'tcx> TypeMap<'tcx> { &mut unique_type_id); }, _ => { - cx.sess().bug(format!("get_unique_type_id_of_type() - unexpected type: {}, {}", - ppaux::ty_to_string(cx.tcx(), type_)[], - type_.sty)[]) + cx.sess().bug(format!("get_unique_type_id_of_type() - unexpected type: {}, {:?}", + ppaux::ty_to_string(cx.tcx(), type_).index(&FullRange), + type_.sty).index(&FullRange)) } }; @@ -518,7 +524,7 @@ impl<'tcx> TypeMap<'tcx> { output.push_str(crate_hash.as_str()); output.push_str("/"); - output.push_str(format!("{:x}", def_id.node)[]); + output.push_str(format!("{:x}", def_id.node).index(&FullRange)); // Maybe check that there is no self type here. 
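// The surrounding TypeMap code builds textual "unique type ids" by
// concatenating a crate hash, the item's node id in hex, and the already
// interned ids of its type parameters. A rough standalone sketch of that
// string construction (plain strings here; the real code interns the result):
fn unique_id_of_item(crate_hash: &str, node_id: u32, param_ids: &[&str]) -> String {
    let mut out = String::new();
    out.push_str(crate_hash);
    out.push('/');
    out.push_str(&format!("{:x}", node_id));
    for param in param_ids {
        out.push_str(param);
        out.push(',');
    }
    out
}

fn main() {
    assert_eq!(unique_id_of_item("a1b2c3", 255, &["{uid#1}", "{uid#2}"]),
               "a1b2c3/ff{uid#1},{uid#2},");
}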
@@ -531,7 +537,7 @@ impl<'tcx> TypeMap<'tcx> { type_map.get_unique_type_id_of_type(cx, type_parameter); let param_type_id = type_map.get_unique_type_id_as_string(param_type_id); - output.push_str(param_type_id[]); + output.push_str(param_type_id.index(&FullRange)); output.push(','); } @@ -568,26 +574,28 @@ impl<'tcx> TypeMap<'tcx> { } }; - for &parameter_type in sig.0.inputs.iter() { + let sig = ty::erase_late_bound_regions(cx.tcx(), sig); + + for &parameter_type in sig.inputs.iter() { let parameter_type_id = self.get_unique_type_id_of_type(cx, parameter_type); let parameter_type_id = self.get_unique_type_id_as_string(parameter_type_id); - unique_type_id.push_str(parameter_type_id[]); + unique_type_id.push_str(parameter_type_id.index(&FullRange)); unique_type_id.push(','); } - if sig.0.variadic { + if sig.variadic { unique_type_id.push_str("..."); } unique_type_id.push_str("|->"); - match sig.0.output { + match sig.output { ty::FnConverging(ret_ty) => { let return_type_id = self.get_unique_type_id_of_type(cx, ret_ty); let return_type_id = self.get_unique_type_id_as_string(return_type_id); - unique_type_id.push_str(return_type_id[]); + unique_type_id.push_str(return_type_id.index(&FullRange)); } ty::FnDiverging => { unique_type_id.push_str("!"); @@ -618,7 +626,7 @@ impl<'tcx> TypeMap<'tcx> { let enum_type_id = self.get_unique_type_id_of_type(cx, enum_type); let enum_variant_type_id = format!("{}::{}", self.get_unique_type_id_as_string(enum_type_id) - [], + .index(&FullRange), variant_name); let interner_key = self.unique_id_interner.intern(Rc::new(enum_variant_type_id)); UniqueTypeId(interner_key) @@ -798,20 +806,20 @@ pub fn create_global_var_metadata(cx: &CrateContext, format!("debuginfo::\ create_global_var_metadata() - Captured var-id refers to \ - unexpected ast_item variant: {}", - var_item)[]) + unexpected ast_item variant: {:?}", + var_item).index(&FullRange)) } } }, _ => cx.sess().bug(format!("debuginfo::create_global_var_metadata() \ - Captured var-id refers to unexpected \ - ast_map variant: {}", - var_item)[]) + ast_map variant: {:?}", + var_item).index(&FullRange)) }; let (file_metadata, line_number) = if span != codemap::DUMMY_SP { let loc = span_start(cx, span); - (file_metadata(cx, loc.file.name[]), loc.line as c_uint) + (file_metadata(cx, loc.file.name.index(&FullRange)), loc.line as c_uint) } else { (UNKNOWN_FILE_METADATA, UNKNOWN_LINE_NUMBER) }; @@ -822,7 +830,7 @@ pub fn create_global_var_metadata(cx: &CrateContext, let namespace_node = namespace_for_item(cx, ast_util::local_def(node_id)); let var_name = token::get_ident(ident).get().to_string(); let linkage_name = - namespace_node.mangled_name_of_contained_item(var_name[]); + namespace_node.mangled_name_of_contained_item(var_name.index(&FullRange)); let var_scope = namespace_node.scope; let var_name = CString::from_slice(var_name.as_bytes()); @@ -861,7 +869,7 @@ pub fn create_local_var_metadata(bcx: Block, local: &ast::Local) { None => { bcx.sess().span_bug(span, format!("no entry in lllocals table for {}", - node_id)[]); + node_id).index(&FullRange)); } }; @@ -914,8 +922,8 @@ pub fn create_captured_var_metadata<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, format!( "debuginfo::create_captured_var_metadata() - \ Captured var-id refers to unexpected \ - ast_map variant: {}", - ast_item)[]); + ast_map variant: {:?}", + ast_item).index(&FullRange)); } } } @@ -924,8 +932,8 @@ pub fn create_captured_var_metadata<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, .span_bug(span, format!("debuginfo::create_captured_var_metadata() - \ Captured var-id refers to
unexpected \ - ast_map variant: {}", - ast_item)[]); + ast_map variant: {:?}", + ast_item).index(&FullRange)); } }; @@ -955,7 +963,7 @@ pub fn create_captured_var_metadata<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let variable_access = IndirectVariable { alloca: env_pointer, - address_operations: address_operations[..address_op_count] + address_operations: address_operations.index(&(0..address_op_count)) }; declare_local(bcx, @@ -1032,7 +1040,7 @@ pub fn create_argument_metadata(bcx: Block, arg: &ast::Arg) { None => { bcx.sess().span_bug(span, format!("no entry in lllocals table for {}", - node_id)[]); + node_id).index(&FullRange)); } }; @@ -1146,7 +1154,7 @@ pub fn get_cleanup_debug_loc_for_ast_node<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, if let Some(code_snippet) = code_snippet { let bytes = code_snippet.as_bytes(); - if bytes.len() > 0 && bytes[bytes.len()-1 ..] == b"}" { + if bytes.len() > 0 && bytes.index(&((bytes.len()-1)..)) == b"}" { cleanup_span = Span { lo: node_span.hi - codemap::BytePos(1), hi: node_span.hi, @@ -1290,7 +1298,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, match expr.node { ast::ExprClosure(_, _, ref fn_decl, ref top_level_block) => { let name = format!("fn{}", token::gensym("fn")); - let name = token::str_to_ident(name[]); + let name = token::str_to_ident(name.index(&FullRange)); (name, &**fn_decl, // This is not quite right. It should actually inherit // the generics of the enclosing function. @@ -1321,8 +1329,8 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, _ => { cx.sess() .bug(format!("create_function_debug_context: \ - unexpected sort of node: {}", - fnitem)[]) + unexpected sort of node: {:?}", + fnitem).index(&FullRange)) } } } @@ -1332,8 +1340,8 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, return FunctionDebugContext::FunctionWithoutDebugInfo; } _ => cx.sess().bug(format!("create_function_debug_context: \ - unexpected sort of node: {}", - fnitem)[]) + unexpected sort of node: {:?}", + fnitem).index(&FullRange)) }; // This can be the case for functions inlined from another crate @@ -1342,7 +1350,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, } let loc = span_start(cx, span); - let file_metadata = file_metadata(cx, loc.file.name[]); + let file_metadata = file_metadata(cx, loc.file.name.index(&FullRange)); let function_type_metadata = unsafe { let fn_signature = get_function_signature(cx, @@ -1369,7 +1377,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, let (linkage_name, containing_scope) = if has_path { let namespace_node = namespace_for_item(cx, ast_util::local_def(fn_ast_id)); let linkage_name = namespace_node.mangled_name_of_contained_item( - function_name[]); + function_name.index(&FullRange)); let containing_scope = namespace_node.scope; (linkage_name, containing_scope) } else { @@ -1457,7 +1465,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, signature.push(type_metadata(cx, arg_type, codemap::DUMMY_SP)); } - return create_DIArray(DIB(cx), signature[]); + return create_DIArray(DIB(cx), signature.index(&FullRange)); } fn get_template_parameters<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, @@ -1492,7 +1500,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, actual_self_type, true); - name_to_append_suffix_to.push_str(actual_self_type_name[]); + name_to_append_suffix_to.push_str(actual_self_type_name.index(&FullRange)); if 
generics.is_type_parameterized() { name_to_append_suffix_to.push_str(","); @@ -1531,7 +1539,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, let actual_type_name = compute_debuginfo_type_name(cx, actual_type, true); - name_to_append_suffix_to.push_str(actual_type_name[]); + name_to_append_suffix_to.push_str(actual_type_name.index(&FullRange)); if index != generics.ty_params.len() - 1 { name_to_append_suffix_to.push_str(","); @@ -1558,7 +1566,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, name_to_append_suffix_to.push('>'); - return create_DIArray(DIB(cx), template_params[]); + return create_DIArray(DIB(cx), template_params.index(&FullRange)); } } @@ -1615,7 +1623,7 @@ fn compile_unit_metadata(cx: &CrateContext) -> DIDescriptor { } }; - debug!("compile_unit_metadata: {}", compile_unit_name); + debug!("compile_unit_metadata: {:?}", compile_unit_name); let producer = format!("rustc version {}", (option_env!("CFG_VERSION")).expect("CFG_VERSION")); @@ -1652,7 +1660,7 @@ fn declare_local<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let cx: &CrateContext = bcx.ccx(); let filename = span_start(cx, span).file.name.clone(); - let file_metadata = file_metadata(cx, filename[]); + let file_metadata = file_metadata(cx, filename.index(&FullRange)); let name = token::get_ident(variable_ident); let loc = span_start(cx, span); @@ -1738,7 +1746,7 @@ fn file_metadata(cx: &CrateContext, full_path: &str) -> DIFile { let work_dir = cx.sess().working_dir.as_str().unwrap(); let file_name = if full_path.starts_with(work_dir) { - full_path[work_dir.len() + 1u..full_path.len()] + full_path.index(&((work_dir.len() + 1u)..full_path.len())) } else { full_path }; @@ -1769,8 +1777,8 @@ fn scope_metadata(fcx: &FunctionContext, let node = fcx.ccx.tcx().map.get(node_id); fcx.ccx.sess().span_bug(error_reporting_span, - format!("debuginfo: Could not find scope info for node {}", - node)[]); + format!("debuginfo: Could not find scope info for node {:?}", + node).index(&FullRange)); } } } @@ -1789,7 +1797,7 @@ fn diverging_type_metadata(cx: &CrateContext) -> DIType { fn basic_type_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> DIType { - debug!("basic_type_metadata: {}", t); + debug!("basic_type_metadata: {:?}", t); let (name, encoding) = match t.sty { ty::ty_tup(ref elements) if elements.is_empty() => @@ -1797,14 +1805,14 @@ fn basic_type_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, ty::ty_bool => ("bool".to_string(), DW_ATE_boolean), ty::ty_char => ("char".to_string(), DW_ATE_unsigned_char), ty::ty_int(int_ty) => match int_ty { - ast::TyI => ("int".to_string(), DW_ATE_signed), + ast::TyIs => ("isize".to_string(), DW_ATE_signed), ast::TyI8 => ("i8".to_string(), DW_ATE_signed), ast::TyI16 => ("i16".to_string(), DW_ATE_signed), ast::TyI32 => ("i32".to_string(), DW_ATE_signed), ast::TyI64 => ("i64".to_string(), DW_ATE_signed) }, ty::ty_uint(uint_ty) => match uint_ty { - ast::TyU => ("uint".to_string(), DW_ATE_unsigned), + ast::TyUs => ("usize".to_string(), DW_ATE_unsigned), ast::TyU8 => ("u8".to_string(), DW_ATE_unsigned), ast::TyU16 => ("u16".to_string(), DW_ATE_unsigned), ast::TyU32 => ("u32".to_string(), DW_ATE_unsigned), @@ -1966,7 +1974,7 @@ impl<'tcx> RecursiveTypeDescription<'tcx> { cx.sess().bug(format!("Forward declaration of potentially recursive type \ '{}' was not found in TypeMap!", ppaux::ty_to_string(cx.tcx(), unfinished_type)) - []); + .index(&FullRange)); } } @@ -1978,7 +1986,7 @@ impl<'tcx> RecursiveTypeDescription<'tcx> { 
set_members_of_composite_type(cx, metadata_stub, llvm_type, - member_descriptions[]); + member_descriptions.index(&FullRange)); return MetadataCreationResult::new(metadata_stub, true); } } @@ -2050,7 +2058,7 @@ fn prepare_struct_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, let struct_metadata_stub = create_struct_stub(cx, struct_llvm_type, - struct_name[], + struct_name.index(&FullRange), unique_type_id, containing_scope); @@ -2111,7 +2119,7 @@ fn prepare_tuple_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, unique_type_id, create_struct_stub(cx, tuple_llvm_type, - tuple_name[], + tuple_name.index(&FullRange), unique_type_id, UNKNOWN_SCOPE_METADATA), tuple_llvm_type, @@ -2171,7 +2179,7 @@ impl<'tcx> EnumMemberDescriptionFactory<'tcx> { set_members_of_composite_type(cx, variant_type_metadata, variant_llvm_type, - member_descriptions[]); + member_descriptions.index(&FullRange)); MemberDescription { name: "".to_string(), llvm_type: variant_llvm_type, @@ -2204,7 +2212,7 @@ impl<'tcx> EnumMemberDescriptionFactory<'tcx> { set_members_of_composite_type(cx, variant_type_metadata, variant_llvm_type, - member_descriptions[]); + member_descriptions.index(&FullRange)); vec![ MemberDescription { name: "".to_string(), @@ -2304,7 +2312,7 @@ impl<'tcx> EnumMemberDescriptionFactory<'tcx> { set_members_of_composite_type(cx, variant_type_metadata, variant_llvm_type, - variant_member_descriptions[]); + variant_member_descriptions.index(&FullRange)); // Encode the information about the null variant in the union // member's name. @@ -2383,7 +2391,7 @@ fn describe_enum_variant<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, .iter() .map(|&t| type_of::type_of(cx, t)) .collect::>() - [], + .index(&FullRange), struct_def.packed); // Could do some consistency checks here: size, align, field count, discr type @@ -2450,7 +2458,7 @@ fn prepare_enum_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, let (containing_scope, definition_span) = get_namespace_and_span_for_item(cx, enum_def_id); let loc = span_start(cx, definition_span); - let file_metadata = file_metadata(cx, loc.file.name[]); + let file_metadata = file_metadata(cx, loc.file.name.index(&FullRange)); let variants = ty::enum_variants(cx.tcx(), enum_def_id); @@ -2637,7 +2645,7 @@ fn set_members_of_composite_type(cx: &CrateContext, Please use a rustc built with anewer \ version of LLVM.", llvm_version_major, - llvm_version_minor)[]); + llvm_version_minor).index(&FullRange)); } else { cx.sess().bug("debuginfo::set_members_of_composite_type() - \ Already completed forward declaration re-encountered."); @@ -2675,7 +2683,7 @@ fn set_members_of_composite_type(cx: &CrateContext, .collect(); unsafe { - let type_array = create_DIArray(DIB(cx), member_metadata[]); + let type_array = create_DIArray(DIB(cx), member_metadata.index(&FullRange)); llvm::LLVMDICompositeTypeSetTypeArray(composite_type_metadata, type_array); } } @@ -2774,7 +2782,7 @@ fn vec_slice_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, let member_llvm_types = slice_llvm_type.field_types(); assert!(slice_layout_is_correct(cx, - member_llvm_types[], + member_llvm_types.index(&FullRange), element_type)); let member_descriptions = [ MemberDescription { @@ -2796,11 +2804,11 @@ fn vec_slice_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, assert!(member_descriptions.len() == member_llvm_types.len()); let loc = span_start(cx, span); - let file_metadata = file_metadata(cx, loc.file.name[]); + let file_metadata = file_metadata(cx, loc.file.name.index(&FullRange)); let metadata = composite_type_metadata(cx, slice_llvm_type, 
- slice_type_name[], + slice_type_name.index(&FullRange), unique_type_id, &member_descriptions, UNKNOWN_SCOPE_METADATA, @@ -2822,11 +2830,14 @@ fn subroutine_type_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, unique_type_id: UniqueTypeId, signature: &ty::PolyFnSig<'tcx>, span: Span) - -> MetadataCreationResult { - let mut signature_metadata: Vec<DIType> = Vec::with_capacity(signature.0.inputs.len() + 1); + -> MetadataCreationResult + { + let signature = ty::erase_late_bound_regions(cx.tcx(), signature); + + let mut signature_metadata: Vec<DIType> = Vec::with_capacity(signature.inputs.len() + 1); // return type - signature_metadata.push(match signature.0.output { + signature_metadata.push(match signature.output { ty::FnConverging(ret_ty) => match ret_ty.sty { ty::ty_tup(ref tys) if tys.is_empty() => ptr::null_mut(), _ => type_metadata(cx, ret_ty, span) @@ -2835,7 +2846,7 @@ fn subroutine_type_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, }); // regular arguments - for &argument_type in signature.0.inputs.iter() { + for &argument_type in signature.inputs.iter() { signature_metadata.push(type_metadata(cx, argument_type, span)); } @@ -2846,7 +2857,7 @@ fn subroutine_type_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, llvm::LLVMDIBuilderCreateSubroutineType( DIB(cx), UNKNOWN_FILE_METADATA, - create_DIArray(DIB(cx), signature_metadata[])) + create_DIArray(DIB(cx), signature_metadata.index(&FullRange))) }, false); } @@ -2872,7 +2883,7 @@ fn trait_pointer_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, let pp_type_name = ppaux::ty_to_string(cx.tcx(), trait_type); cx.sess().bug(format!("debuginfo: Unexpected trait-object type in \ trait_pointer_metadata(): {}", - pp_type_name[])[]); + pp_type_name.index(&FullRange)).index(&FullRange)); } }; @@ -2886,7 +2897,7 @@ fn trait_pointer_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, composite_type_metadata(cx, trait_llvm_type, - trait_type_name[], + trait_type_name.index(&FullRange), unique_type_id, &[], containing_scope, @@ -2931,7 +2942,7 @@ fn type_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, } }; - debug!("type_metadata: {}", t); + debug!("type_metadata: {:?}", t); let sty = &t.sty; let MetadataCreationResult { metadata, already_stored_in_typemap } = match *sty { @@ -3006,13 +3017,13 @@ fn type_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, ty::ty_tup(ref elements) => { prepare_tuple_metadata(cx, t, - elements[], + elements.index(&FullRange), unique_type_id, usage_site_span).finalize(cx) } _ => { - cx.sess().bug(format!("debuginfo: unexpected type in type_metadata: {}", - sty)[]) + cx.sess().bug(format!("debuginfo: unexpected type in type_metadata: {:?}", + sty).index(&FullRange)) } }; @@ -3030,9 +3041,9 @@ fn type_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, type id '{}' to already be in \ the debuginfo::TypeMap but it \ was not. (Ty = {})", - unique_type_id_str[], + unique_type_id_str.index(&FullRange), ppaux::ty_to_string(cx.tcx(), t)); - cx.sess().span_bug(usage_site_span, error_message[]); + cx.sess().span_bug(usage_site_span, error_message.index(&FullRange)); } }; @@ -3045,9 +3056,9 @@ fn type_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, UniqueTypeId maps in \ debuginfo::TypeMap.
\ UniqueTypeId={}, Ty={}", - unique_type_id_str[], + unique_type_id_str.index(&FullRange), ppaux::ty_to_string(cx.tcx(), t)); - cx.sess().span_bug(usage_site_span, error_message[]); + cx.sess().span_bug(usage_site_span, error_message.index(&FullRange)); } } None => { @@ -3253,7 +3264,7 @@ fn create_scope_map(cx: &CrateContext, { // Create a new lexical scope and push it onto the stack let loc = cx.sess().codemap().lookup_char_pos(scope_span.lo); - let file_metadata = file_metadata(cx, loc.file.name[]); + let file_metadata = file_metadata(cx, loc.file.name.index(&FullRange)); let parent_scope = scope_stack.last().unwrap().scope_metadata; let scope_metadata = unsafe { @@ -3375,7 +3386,7 @@ fn create_scope_map(cx: &CrateContext, if need_new_scope { // Create a new lexical scope and push it onto the stack let loc = cx.sess().codemap().lookup_char_pos(pat.span.lo); - let file_metadata = file_metadata(cx, loc.file.name[]); + let file_metadata = file_metadata(cx, loc.file.name.index(&FullRange)); let parent_scope = scope_stack.last().unwrap().scope_metadata; let scope_metadata = unsafe { @@ -3729,12 +3740,12 @@ fn push_debuginfo_type_name<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, ty::ty_bool => output.push_str("bool"), ty::ty_char => output.push_str("char"), ty::ty_str => output.push_str("str"), - ty::ty_int(ast::TyI) => output.push_str("int"), + ty::ty_int(ast::TyIs) => output.push_str("isize"), ty::ty_int(ast::TyI8) => output.push_str("i8"), ty::ty_int(ast::TyI16) => output.push_str("i16"), ty::ty_int(ast::TyI32) => output.push_str("i32"), ty::ty_int(ast::TyI64) => output.push_str("i64"), - ty::ty_uint(ast::TyU) => output.push_str("uint"), + ty::ty_uint(ast::TyUs) => output.push_str("usize"), ty::ty_uint(ast::TyU8) => output.push_str("u8"), ty::ty_uint(ast::TyU16) => output.push_str("u16"), ty::ty_uint(ast::TyU32) => output.push_str("u32"), @@ -3794,8 +3805,9 @@ fn push_debuginfo_type_name<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, output.push(']'); }, ty::ty_trait(ref trait_data) => { - push_item_name(cx, trait_data.principal_def_id(), false, output); - push_type_params(cx, trait_data.principal.0.substs, output); + let principal = ty::erase_late_bound_regions(cx.tcx(), &trait_data.principal); + push_item_name(cx, principal.def_id, false, output); + push_type_params(cx, principal.substs, output); }, ty::ty_bare_fn(_, &ty::BareFnTy{ unsafety, abi, ref sig } ) => { if unsafety == ast::Unsafety::Unsafe { output.push_str("unsafe "); } @@ -3810,8 +3822,9 @@ fn push_debuginfo_type_name<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, output.push_str("fn("); - if sig.0.inputs.len() > 0 { - for &parameter_type in sig.0.inputs.iter() { + let sig = ty::erase_late_bound_regions(cx.tcx(), sig); + if sig.inputs.len() > 0 { + for &parameter_type in sig.inputs.iter() { push_debuginfo_type_name(cx, parameter_type, true, output); output.push_str(", "); } @@ -3819,8 +3832,8 @@ fn push_debuginfo_type_name<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, output.pop(); } - if sig.0.variadic { - if sig.0.inputs.len() > 0 { + if sig.variadic { + if sig.inputs.len() > 0 { output.push_str(", ..."); } else { output.push_str("..."); @@ -3829,7 +3842,7 @@ fn push_debuginfo_type_name<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, output.push(')'); - match sig.0.output { + match sig.output { ty::FnConverging(result_type) if ty::type_is_nil(result_type) => {} ty::FnConverging(result_type) => { output.push_str(" -> "); @@ -3849,7 +3862,7 @@ fn push_debuginfo_type_name<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, ty::ty_projection(..)
| ty::ty_param(_) => { cx.sess().bug(format!("debuginfo: Trying to create type name for \ - unexpected type: {}", ppaux::ty_to_string(cx.tcx(), t))[]); + unexpected type: {}", ppaux::ty_to_string(cx.tcx(), t)).index(&FullRange)); } } @@ -3932,13 +3945,13 @@ impl NamespaceTreeNode { None => {} } let string = token::get_name(node.name); - output.push_str(format!("{}", string.get().len())[]); + output.push_str(format!("{}", string.get().len()).index(&FullRange)); output.push_str(string.get()); } let mut name = String::from_str("_ZN"); fill_nested(self, &mut name); - name.push_str(format!("{}", item_name.len())[]); + name.push_str(format!("{}", item_name.len()).index(&FullRange)); name.push_str(item_name); name.push('E'); name @@ -3946,7 +3959,7 @@ impl NamespaceTreeNode { } fn crate_root_namespace<'a>(cx: &'a CrateContext) -> &'a str { - cx.link_meta().crate_name[] + cx.link_meta().crate_name.index(&FullRange) } fn namespace_for_item(cx: &CrateContext, def_id: ast::DefId) -> Rc<NamespaceTreeNode> { @@ -4022,8 +4035,8 @@ fn namespace_for_item(cx: &CrateContext, def_id: ast::DefId) -> Rc node, None => { cx.sess().bug(format!("debuginfo::namespace_for_item(): \ - path too short for {}", - def_id)[]); + path too short for {:?}", + def_id).index(&FullRange)); } } }) diff --git a/src/librustc_trans/trans/expr.rs b/src/librustc_trans/trans/expr.rs index 9221ae09df98a..120e2e955e4be 100644 --- a/src/librustc_trans/trans/expr.rs +++ b/src/librustc_trans/trans/expr.rs @@ -306,7 +306,7 @@ fn apply_adjustments<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, // FIXME(#19596) workaround: `|t| t` causes monomorphization recursion fn identity<T>(t: T) -> T { t } - debug!("unsized_info(kind={}, id={}, unadjusted_ty={})", + debug!("unsized_info(kind={:?}, id={}, unadjusted_ty={})", kind, id, unadjusted_ty.repr(bcx.tcx())); match kind { &ty::UnsizeLength(len) => C_uint(bcx.ccx(), len), @@ -318,7 +318,7 @@ fn apply_adjustments<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, unsized_info(bcx, k, id, ty_substs[tp_index], identity) } _ => bcx.sess().bug(format!("UnsizeStruct with bad sty: {}", - bcx.ty_to_string(unadjusted_ty))[]) + bcx.ty_to_string(unadjusted_ty)).index(&FullRange)) }, &ty::UnsizeVtable(ty::TyTrait { ref principal, .. }, _) => { // Note that we preserve binding levels here: @@ -451,7 +451,7 @@ fn apply_adjustments<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let unboxed_ty = match datum_ty.sty { ty::ty_uniq(t) => t, _ => bcx.sess().bug(format!("Expected ty_uniq, found {}", - bcx.ty_to_string(datum_ty))[]) + bcx.ty_to_string(datum_ty)).index(&FullRange)) }; let result_ty = ty::mk_uniq(tcx, ty::unsize_ty(tcx, unboxed_ty, k, expr.span)); @@ -573,40 +573,7 @@ fn trans_datum_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, trans_rec_tup_field(bcx, &**base, idx.node) } ast::ExprIndex(ref base, ref idx) => { - match idx.node { - ast::ExprRange(ref start, ref end) => { - // Special case for slicing syntax (KILLME).
- let _icx = push_ctxt("trans_slice"); - let ccx = bcx.ccx(); - - let method_call = MethodCall::expr(expr.id); - let method_ty = ccx.tcx() - .method_map - .borrow() - .get(&method_call) - .map(|method| method.ty); - let base_datum = unpack_datum!(bcx, trans(bcx, &**base)); - - let mut args = vec![]; - start.as_ref().map(|e| args.push((unpack_datum!(bcx, trans(bcx, &**e)), e.id))); - end.as_ref().map(|e| args.push((unpack_datum!(bcx, trans(bcx, &**e)), e.id))); - - let result_ty = ty::ty_fn_ret(monomorphize_type(bcx, - method_ty.unwrap())).unwrap(); - let scratch = rvalue_scratch_datum(bcx, result_ty, "trans_slice"); - - unpack_result!(bcx, - trans_overloaded_op(bcx, - expr, - method_call, - base_datum, - args, - Some(SaveIn(scratch.val)), - true)); - DatumBlock::new(bcx, scratch.to_expr_datum()) - } - _ => trans_index(bcx, expr, &**base, &**idx, MethodCall::expr(expr.id)) - } + trans_index(bcx, expr, &**base, &**idx, MethodCall::expr(expr.id)) } ast::ExprBox(_, ref contents) => { // Special case for `Box` @@ -656,8 +623,8 @@ fn trans_datum_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, bcx.tcx().sess.span_bug( expr.span, format!("trans_rvalue_datum_unadjusted reached \ - fall-through case: {}", - expr.node)[]); + fall-through case: {:?}", + expr.node).index(&FullRange)); } } } @@ -732,12 +699,16 @@ fn trans_index<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, .map(|method| method.ty); let elt_datum = match method_ty { Some(method_ty) => { + let method_ty = monomorphize_type(bcx, method_ty); + let base_datum = unpack_datum!(bcx, trans(bcx, base)); // Translate index expression. let ix_datum = unpack_datum!(bcx, trans(bcx, idx)); - let ref_ty = ty::ty_fn_ret(monomorphize_type(bcx, method_ty)).unwrap(); + let ref_ty = // invoked methods have LB regions instantiated: + ty::assert_no_late_bound_regions( + bcx.tcx(), &ty::ty_fn_ret(method_ty)).unwrap(); let elt_ty = match ty::deref(ref_ty, true) { None => { bcx.tcx().sess.span_bug(index_expr.span, @@ -1005,8 +976,8 @@ fn trans_rvalue_stmt_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, bcx.tcx().sess.span_bug( expr.span, format!("trans_rvalue_stmt_unadjusted reached \ - fall-through case: {}", - expr.node)[]); + fall-through case: {:?}", + expr.node).index(&FullRange)); } } } @@ -1032,14 +1003,14 @@ fn trans_rvalue_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, controlflow::trans_if(bcx, expr.id, &**cond, &**thn, els.as_ref().map(|e| &**e), dest) } ast::ExprMatch(ref discr, ref arms, _) => { - _match::trans_match(bcx, expr, &**discr, arms[], dest) + _match::trans_match(bcx, expr, &**discr, arms.index(&FullRange), dest) } ast::ExprBlock(ref blk) => { controlflow::trans_block(bcx, &**blk, dest) } ast::ExprStruct(_, ref fields, ref base) => { trans_struct(bcx, - fields[], + fields.index(&FullRange), base.as_ref().map(|e| &**e), expr.span, expr.id, @@ -1104,7 +1075,7 @@ fn trans_rvalue_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, trans_adt(bcx, expr_ty(bcx, expr), 0, - numbered_fields[], + numbered_fields.index(&FullRange), None, dest, Some(NodeInfo { id: expr.id, span: expr.span })) @@ -1148,13 +1119,13 @@ fn trans_rvalue_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, trans_overloaded_call(bcx, expr, &**f, - args[], + args.index(&FullRange), Some(dest)) } else { callee::trans_call(bcx, expr, &**f, - callee::ArgExprs(args[]), + callee::ArgExprs(args.index(&FullRange)), dest) } } @@ -1162,7 +1133,7 @@ fn trans_rvalue_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, callee::trans_method_call(bcx, expr, &*args[0], - callee::ArgExprs(args[]), + 
callee::ArgExprs(args.index(&FullRange)), dest) } ast::ExprBinary(op, ref lhs, ref rhs) => { @@ -1210,8 +1181,8 @@ fn trans_rvalue_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, bcx.tcx().sess.span_bug( expr.span, format!("trans_rvalue_dps_unadjusted reached fall-through \ - case: {}", - expr.node)[]); + case: {:?}", + expr.node).index(&FullRange)); } } } @@ -1260,8 +1231,8 @@ fn trans_def_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, } _ => { bcx.tcx().sess.span_bug(ref_expr.span, format!( - "Non-DPS def {} referened by {}", - def, bcx.node_id_to_string(ref_expr.id))[]); + "Non-DPS def {:?} referened by {}", + def, bcx.node_id_to_string(ref_expr.id)).index(&FullRange)); } } } @@ -1288,9 +1259,9 @@ pub fn trans_def_fn_unadjusted<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, } _ => { ccx.tcx().sess.span_bug(ref_expr.span, format!( - "trans_def_fn_unadjusted invoked on: {} for {}", + "trans_def_fn_unadjusted invoked on: {:?} for {}", def, - ref_expr.repr(ccx.tcx()))[]); + ref_expr.repr(ccx.tcx())).index(&FullRange)); } } } @@ -1310,7 +1281,7 @@ pub fn trans_local_var<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, None => { bcx.sess().bug(format!( "trans_local_var: no llval for upvar {} found", - nid)[]); + nid).index(&FullRange)); } } } @@ -1320,7 +1291,7 @@ pub fn trans_local_var<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, None => { bcx.sess().bug(format!( "trans_local_var: no datum for local/arg {} found", - nid)[]); + nid).index(&FullRange)); } }; debug!("take_local(nid={}, v={}, ty={})", @@ -1329,8 +1300,8 @@ pub fn trans_local_var<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, } _ => { bcx.sess().unimpl(format!( - "unsupported def type in trans_local_var: {}", - def)[]); + "unsupported def type in trans_local_var: {:?}", + def).index(&FullRange)); } } } @@ -1347,11 +1318,11 @@ pub fn with_field_tys<'tcx, R, F>(tcx: &ty::ctxt<'tcx>, { match ty.sty { ty::ty_struct(did, substs) => { - op(0, struct_fields(tcx, did, substs)[]) + op(0, struct_fields(tcx, did, substs).index(&FullRange)) } ty::ty_tup(ref v) => { - op(0, tup_fields(v[])[]) + op(0, tup_fields(v.index(&FullRange)).index(&FullRange)) } ty::ty_enum(_, substs) => { @@ -1361,7 +1332,7 @@ pub fn with_field_tys<'tcx, R, F>(tcx: &ty::ctxt<'tcx>, tcx.sess.bug(format!( "cannot get field types from the enum type {} \ without a node ID", - ty.repr(tcx))[]); + ty.repr(tcx)).index(&FullRange)); } Some(node_id) => { let def = tcx.def_map.borrow()[node_id].clone(); @@ -1372,7 +1343,7 @@ pub fn with_field_tys<'tcx, R, F>(tcx: &ty::ctxt<'tcx>, op(variant_info.disr_val, struct_fields(tcx, variant_id, - substs)[]) + substs).index(&FullRange)) } _ => { tcx.sess.bug("resolve didn't map this expr to a \ @@ -1386,7 +1357,7 @@ pub fn with_field_tys<'tcx, R, F>(tcx: &ty::ctxt<'tcx>, _ => { tcx.sess.bug(format!( "cannot get field types from the type {}", - ty.repr(tcx))[]); + ty.repr(tcx)).index(&FullRange)); } } } @@ -1402,13 +1373,13 @@ fn trans_struct<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let tcx = bcx.tcx(); with_field_tys(tcx, ty, Some(expr_id), |discr, field_tys| { - let mut need_base: Vec<_> = repeat(true).take(field_tys.len()).collect(); + let mut need_base: Vec = repeat(true).take(field_tys.len()).collect(); let numbered_fields = fields.iter().map(|field| { let opt_pos = field_tys.iter().position(|field_ty| field_ty.name == field.ident.node.name); - match opt_pos { + let result = match opt_pos { Some(i) => { need_base[i] = false; (i, &*field.expr) @@ -1417,14 +1388,15 @@ fn trans_struct<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, tcx.sess.span_bug(field.span, "Couldn't find field in 
struct type") } - } + }; + result }).collect::>(); let optbase = match base { Some(base_expr) => { let mut leftovers = Vec::new(); for (i, b) in need_base.iter().enumerate() { if *b { - leftovers.push((i, field_tys[i].mt.ty)) + leftovers.push((i, field_tys[i].mt.ty)); } } Some(StructBaseInfo {expr: base_expr, @@ -1441,7 +1413,7 @@ fn trans_struct<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, trans_adt(bcx, ty, discr, - numbered_fields[], + numbered_fields.as_slice(), optbase, dest, Some(NodeInfo { id: expr_id, span: expr_span })) @@ -2074,20 +2046,20 @@ fn trans_imm_cast<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, cast_float => SIToFP(bcx, lldiscrim_a, ll_t_out), _ => { ccx.sess().bug(format!("translating unsupported cast: \ - {} ({}) -> {} ({})", + {} ({:?}) -> {} ({:?})", t_in.repr(bcx.tcx()), k_in, t_out.repr(bcx.tcx()), - k_out)[]) + k_out).index(&FullRange)) } } } _ => ccx.sess().bug(format!("translating unsupported cast: \ - {} ({}) -> {} ({})", + {} ({:?}) -> {} ({:?})", t_in.repr(bcx.tcx()), k_in, t_out.repr(bcx.tcx()), - k_out)[]) + k_out).index(&FullRange)) }; return immediate_rvalue_bcx(bcx, newval, t_out).to_expr_datumblock(); } @@ -2169,7 +2141,7 @@ fn deref_once<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, -> DatumBlock<'blk, 'tcx, Expr> { let ccx = bcx.ccx(); - debug!("deref_once(expr={}, datum={}, method_call={})", + debug!("deref_once(expr={}, datum={}, method_call={:?})", expr.repr(bcx.tcx()), datum.to_string(ccx), method_call); @@ -2181,6 +2153,8 @@ fn deref_once<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, .get(&method_call).map(|method| method.ty); let datum = match method_ty { Some(method_ty) => { + let method_ty = monomorphize_type(bcx, method_ty); + // Overloaded. Evaluate `trans_overloaded_op`, which will // invoke the user's deref() method, which basically // converts from the `Smaht` pointer that we have into @@ -2192,7 +2166,9 @@ fn deref_once<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, _ => datum }; - let ref_ty = ty::ty_fn_ret(monomorphize_type(bcx, method_ty)).unwrap(); + let ref_ty = // invoked methods have their LB regions instantiated + ty::assert_no_late_bound_regions( + ccx.tcx(), &ty::ty_fn_ret(method_ty)).unwrap(); let scratch = rvalue_scratch_datum(bcx, ref_ty, "overloaded_deref"); unpack_result!(bcx, trans_overloaded_op(bcx, expr, method_call, @@ -2249,11 +2225,11 @@ fn deref_once<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, bcx.tcx().sess.span_bug( expr.span, format!("deref invoked on expr of illegal type {}", - datum.ty.repr(bcx.tcx()))[]); + datum.ty.repr(bcx.tcx())).index(&FullRange)); } }; - debug!("deref_once(expr={}, method_call={}, result={})", + debug!("deref_once(expr={}, method_call={:?}, result={})", expr.id, method_call, r.datum.to_string(ccx)); return r; diff --git a/src/librustc_trans/trans/foreign.rs b/src/librustc_trans/trans/foreign.rs index 1c9be6ae4a8ba..25eb66ab2eb73 100644 --- a/src/librustc_trans/trans/foreign.rs +++ b/src/librustc_trans/trans/foreign.rs @@ -43,7 +43,7 @@ use util::ppaux::Repr; struct ForeignTypes<'tcx> { /// Rust signature of the function - fn_sig: ty::PolyFnSig<'tcx>, + fn_sig: ty::FnSig<'tcx>, /// Adapter object for handling native ABI rules (trust me, you /// don't want to know) @@ -109,7 +109,7 @@ pub fn register_static(ccx: &CrateContext, let llty = type_of::type_of(ccx, ty); let ident = link_name(foreign_item); - match attr::first_attr_value_str_by_name(foreign_item.attrs[], + match attr::first_attr_value_str_by_name(foreign_item.attrs.index(&FullRange), "linkage") { // If this is a static with a linkage specified, then we need to handle // it a little 
specially. The typesystem prevents things like &T and @@ -180,7 +180,7 @@ pub fn register_foreign_item_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, // Make sure the calling convention is right for variadic functions // (should've been caught if not in typeck) - if tys.fn_sig.0.variadic { + if tys.fn_sig.variadic { assert!(cc == llvm::CCallConv); } @@ -218,7 +218,8 @@ pub fn trans_native_call<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, llretptr: ValueRef, llargs_rust: &[ValueRef], passed_arg_tys: Vec>) - -> Block<'blk, 'tcx> { + -> Block<'blk, 'tcx> +{ let ccx = bcx.ccx(); let tcx = bcx.tcx(); @@ -230,16 +231,17 @@ pub fn trans_native_call<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, ccx.tn().val_to_string(llretptr)); let (fn_abi, fn_sig) = match callee_ty.sty { - ty::ty_bare_fn(_, ref fn_ty) => (fn_ty.abi, fn_ty.sig.clone()), + ty::ty_bare_fn(_, ref fn_ty) => (fn_ty.abi, &fn_ty.sig), _ => ccx.sess().bug("trans_native_call called on non-function type") }; - let llsig = foreign_signature(ccx, &fn_sig, passed_arg_tys[]); + let fn_sig = ty::erase_late_bound_regions(ccx.tcx(), fn_sig); + let llsig = foreign_signature(ccx, &fn_sig, passed_arg_tys.index(&FullRange)); let fn_type = cabi::compute_abi_info(ccx, - llsig.llarg_tys[], + llsig.llarg_tys.index(&FullRange), llsig.llret_ty, llsig.ret_def); - let arg_tys: &[cabi::ArgType] = fn_type.arg_tys[]; + let arg_tys: &[cabi::ArgType] = fn_type.arg_tys.index(&FullRange); let mut llargs_foreign = Vec::new(); @@ -365,7 +367,7 @@ pub fn trans_native_call<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let llforeign_retval = CallWithConv(bcx, llfn, - llargs_foreign[], + llargs_foreign.index(&FullRange), cc, Some(attrs)); @@ -387,7 +389,7 @@ pub fn trans_native_call<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, debug!("llforeign_ret_ty={}", ccx.tn().type_to_string(llforeign_ret_ty)); if llrust_ret_ty == llforeign_ret_ty { - match fn_sig.0.output { + match fn_sig.output { ty::FnConverging(result_ty) => { base::store_ty(bcx, llforeign_retval, llretptr, result_ty) } @@ -435,7 +437,7 @@ pub fn trans_foreign_mod(ccx: &CrateContext, foreign_mod: &ast::ForeignMod) { abi => { let ty = ty::node_id_to_type(ccx.tcx(), foreign_item.id); register_foreign_item_fn(ccx, abi, ty, - lname.get()[]); + lname.get().index(&FullRange)); // Unlike for other items, we shouldn't call // `base::update_linkage` here. Foreign items have // special linkage requirements, which are handled @@ -567,7 +569,7 @@ pub fn trans_rust_fn_with_foreign_abi<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, ccx.sess().bug(format!("build_rust_fn: extern fn {} has ty {}, \ expected a bare fn ty", ccx.tcx().map.path_to_string(id), - t.repr(tcx))[]); + t.repr(tcx)).index(&FullRange)); } }; @@ -575,7 +577,7 @@ pub fn trans_rust_fn_with_foreign_abi<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, ccx.tcx().map.path_to_string(id), id, t.repr(tcx)); - let llfn = base::decl_internal_rust_fn(ccx, t, ps[]); + let llfn = base::decl_internal_rust_fn(ccx, t, ps.index(&FullRange)); base::set_llvm_fn_attrs(ccx, attrs, llfn); base::trans_fn(ccx, decl, body, llfn, param_substs, id, &[]); llfn @@ -635,7 +637,7 @@ pub fn trans_rust_fn_with_foreign_abi<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, }; // Push Rust return pointer, using null if it will be unused. 
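The `foo[]` to `foo.index(&FullRange)` rewrites that dominate this file are full-range slicing written out explicitly for the pre-1.0 compiler this patch targets. On a stabilized toolchain the same operation is spelled with range syntax (the `FullRange` type itself was later renamed `RangeFull`); a minimal sketch for orientation only, not the patch-era API:

    fn main() {
        let args = vec!["a".to_string(), "b".to_string(), "c".to_string()];
        // Patch-era spellings: args[]  or  args.index(&FullRange)
        let all: &[String] = &args[..];   // full-range slice
        let tail: &[String] = &args[1..]; // range-from slice
        println!("{} total, {} after the first", all.len(), tail.len());
    }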
- let rust_uses_outptr = match tys.fn_sig.0.output { + let rust_uses_outptr = match tys.fn_sig.output { ty::FnConverging(ret_ty) => type_of::return_uses_outptr(ccx, ret_ty), ty::FnDiverging => false }; @@ -668,7 +670,7 @@ pub fn trans_rust_fn_with_foreign_abi<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, return_ty={}", ccx.tn().val_to_string(slot), ccx.tn().type_to_string(llrust_ret_ty), - tys.fn_sig.0.output.repr(tcx)); + tys.fn_sig.output.repr(tcx)); llrust_args.push(slot); return_alloca = Some(slot); } @@ -683,8 +685,8 @@ pub fn trans_rust_fn_with_foreign_abi<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, // Build up the arguments to the call to the rust function. // Careful to adapt for cases where the native convention uses // a pointer and Rust does not or vice versa. - for i in range(0, tys.fn_sig.0.inputs.len()) { - let rust_ty = tys.fn_sig.0.inputs[i]; + for i in range(0, tys.fn_sig.inputs.len()) { + let rust_ty = tys.fn_sig.inputs[i]; let llrust_ty = tys.llsig.llarg_tys[i]; let rust_indirect = type_of::arg_is_indirect(ccx, rust_ty); let llforeign_arg_ty = tys.fn_ty.arg_tys[i]; @@ -748,7 +750,7 @@ pub fn trans_rust_fn_with_foreign_abi<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, debug!("calling llrustfn = {}, t = {}", ccx.tn().val_to_string(llrustfn), t.repr(ccx.tcx())); let attributes = base::get_fn_llvm_attributes(ccx, t); - let llrust_ret_val = builder.call(llrustfn, llrust_args[], Some(attributes)); + let llrust_ret_val = builder.call(llrustfn, llrust_args.as_slice(), Some(attributes)); // Get the return value where the foreign fn expects it. let llforeign_ret_ty = match tys.fn_ty.ret_ty.cast { @@ -815,9 +817,9 @@ pub fn trans_rust_fn_with_foreign_abi<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, // the massive simplifications that have occurred. pub fn link_name(i: &ast::ForeignItem) -> InternedString { - match attr::first_attr_value_str_by_name(i.attrs[], "link_name") { + match attr::first_attr_value_str_by_name(i.attrs.index(&FullRange), "link_name") { Some(ln) => ln.clone(), - None => match weak_lang_items::link_name(i.attrs[]) { + None => match weak_lang_items::link_name(i.attrs.index(&FullRange)) { Some(name) => name, None => token::get_ident(i.ident), } @@ -829,10 +831,11 @@ pub fn link_name(i: &ast::ForeignItem) -> InternedString { /// because foreign functions just plain ignore modes. They also don't pass aggregate values by /// pointer like we do. 
fn foreign_signature<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, - fn_sig: &ty::PolyFnSig<'tcx>, arg_tys: &[Ty<'tcx>]) + fn_sig: &ty::FnSig<'tcx>, + arg_tys: &[Ty<'tcx>]) -> LlvmSignature { let llarg_tys = arg_tys.iter().map(|&arg| arg_type_of(ccx, arg)).collect(); - let (llret_ty, ret_def) = match fn_sig.0.output { + let (llret_ty, ret_def) = match fn_sig.output { ty::FnConverging(ret_ty) => (type_of::arg_type_of(ccx, ret_ty), !return_type_is_void(ccx, ret_ty)), ty::FnDiverging => @@ -853,12 +856,13 @@ fn foreign_types_for_id<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, fn foreign_types_for_fn_ty<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, ty: Ty<'tcx>) -> ForeignTypes<'tcx> { let fn_sig = match ty.sty { - ty::ty_bare_fn(_, ref fn_ty) => fn_ty.sig.clone(), + ty::ty_bare_fn(_, ref fn_ty) => &fn_ty.sig, _ => ccx.sess().bug("foreign_types_for_fn_ty called on non-function type") }; - let llsig = foreign_signature(ccx, &fn_sig, fn_sig.0.inputs.as_slice()); + let fn_sig = ty::erase_late_bound_regions(ccx.tcx(), fn_sig); + let llsig = foreign_signature(ccx, &fn_sig, fn_sig.inputs.as_slice()); let fn_ty = cabi::compute_abi_info(ccx, - llsig.llarg_tys[], + llsig.llarg_tys.index(&FullRange), llsig.llret_ty, llsig.ret_def); debug!("foreign_types_for_fn_ty(\ @@ -867,9 +871,9 @@ fn foreign_types_for_fn_ty<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, fn_ty={} -> {}, \ ret_def={}", ty.repr(ccx.tcx()), - ccx.tn().types_to_str(llsig.llarg_tys[]), + ccx.tn().types_to_str(llsig.llarg_tys.index(&FullRange)), ccx.tn().type_to_string(llsig.llret_ty), - ccx.tn().types_to_str(fn_ty.arg_tys.iter().map(|t| t.ty).collect::>()[]), + ccx.tn().types_to_str(fn_ty.arg_tys.iter().map(|t| t.ty).collect::>().as_slice()), ccx.tn().type_to_string(fn_ty.ret_ty.ty), llsig.ret_def); @@ -916,10 +920,10 @@ fn lltype_for_fn_from_foreign_types(ccx: &CrateContext, tys: &ForeignTypes) -> T llargument_tys.push(llarg_ty); } - if tys.fn_sig.0.variadic { + if tys.fn_sig.variadic { Type::variadic_func(llargument_tys.as_slice(), &llreturn_ty) } else { - Type::func(llargument_tys[], &llreturn_ty) + Type::func(llargument_tys.index(&FullRange), &llreturn_ty) } } diff --git a/src/librustc_trans/trans/glue.rs b/src/librustc_trans/trans/glue.rs index 2fd9031fdfe4c..52e7a986d7e98 100644 --- a/src/librustc_trans/trans/glue.rs +++ b/src/librustc_trans/trans/glue.rs @@ -161,7 +161,7 @@ pub fn get_drop_glue<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> Val let (glue, new_sym) = match ccx.available_drop_glues().borrow().get(&t) { Some(old_sym) => { - let glue = decl_cdecl_fn(ccx, old_sym[], llfnty, ty::mk_nil(ccx.tcx())); + let glue = decl_cdecl_fn(ccx, old_sym.index(&FullRange), llfnty, ty::mk_nil(ccx.tcx())); (glue, None) }, None => { @@ -212,7 +212,8 @@ fn trans_struct_drop<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, dtor_did: ast::DefId, class_did: ast::DefId, substs: &subst::Substs<'tcx>) - -> Block<'blk, 'tcx> { + -> Block<'blk, 'tcx> +{ let repr = adt::represent_type(bcx.ccx(), t); // Find and call the actual destructor @@ -228,11 +229,12 @@ fn trans_struct_drop<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let fty = ty::lookup_item_type(bcx.tcx(), dtor_did).ty.subst(bcx.tcx(), substs); let self_ty = match fty.sty { ty::ty_bare_fn(_, ref f) => { - assert!(f.sig.0.inputs.len() == 1); - f.sig.0.inputs[0] + let sig = ty::erase_late_bound_regions(bcx.tcx(), &f.sig); + assert!(sig.inputs.len() == 1); + sig.inputs[0] } _ => bcx.sess().bug(format!("Expected function type, found {}", - bcx.ty_to_string(fty))[]) + bcx.ty_to_string(fty)).index(&FullRange)) }; let (struct_data, 
info) = if type_is_sized(bcx.tcx(), t) { @@ -293,7 +295,7 @@ fn trans_struct_drop<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, class_did, &[get_drop_glue_type(bcx.ccx(), t)], ty::mk_nil(bcx.tcx())); - let (_, variant_cx) = invoke(variant_cx, dtor_addr, args[], dtor_ty, None); + let (_, variant_cx) = invoke(variant_cx, dtor_addr, args.index(&FullRange), dtor_ty, None); variant_cx.fcx.pop_and_trans_custom_cleanup_scope(variant_cx, field_scope); variant_cx @@ -352,7 +354,7 @@ fn size_and_align_of_dst<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, t: Ty<'tcx>, info: (Mul(bcx, info, C_uint(bcx.ccx(), unit_size)), C_uint(bcx.ccx(), 8u)) } _ => bcx.sess().bug(format!("Unexpected unsized type, found {}", - bcx.ty_to_string(t))[]) + bcx.ty_to_string(t)).index(&FullRange)) } } @@ -424,7 +426,7 @@ fn make_drop_glue<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, v0: ValueRef, t: Ty<'tcx>) bcx.sess().warn(format!("Ignoring drop flag in destructor for {}\ because the struct is unsized. See issue\ #16758", - bcx.ty_to_string(t))[]); + bcx.ty_to_string(t)).index(&FullRange)); trans_struct_drop(bcx, t, v0, dtor, did, substs) } } @@ -494,7 +496,7 @@ pub fn declare_tydesc<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) note_unique_llvm_symbol(ccx, name); let ty_name = token::intern_and_get_ident( - ppaux::ty_to_string(ccx.tcx(), t)[]); + ppaux::ty_to_string(ccx.tcx(), t).index(&FullRange)); let ty_name = C_str_slice(ccx, ty_name); debug!("--- declare_tydesc {}", ppaux::ty_to_string(ccx.tcx(), t)); @@ -513,8 +515,8 @@ fn declare_generic_glue<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>, let fn_nm = mangle_internal_name_by_type_and_seq( ccx, t, - format!("glue_{}", name)[]); - let llfn = decl_cdecl_fn(ccx, fn_nm[], llfnty, ty::mk_nil(ccx.tcx())); + format!("glue_{}", name).index(&FullRange)); + let llfn = decl_cdecl_fn(ccx, fn_nm.index(&FullRange), llfnty, ty::mk_nil(ccx.tcx())); note_unique_llvm_symbol(ccx, fn_nm.clone()); return (fn_nm, llfn); } diff --git a/src/librustc_trans/trans/intrinsic.rs b/src/librustc_trans/trans/intrinsic.rs index 6e71653891181..ed75445b9939b 100644 --- a/src/librustc_trans/trans/intrinsic.rs +++ b/src/librustc_trans/trans/intrinsic.rs @@ -150,14 +150,16 @@ pub fn trans_intrinsic_call<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, dest: expr::Dest, substs: subst::Substs<'tcx>, call_info: NodeInfo) - -> Result<'blk, 'tcx> { - + -> Result<'blk, 'tcx> +{ let fcx = bcx.fcx; let ccx = fcx.ccx; let tcx = bcx.tcx(); let ret_ty = match callee_ty.sty { - ty::ty_bare_fn(_, ref f) => f.sig.0.output, + ty::ty_bare_fn(_, ref f) => { + ty::erase_late_bound_regions(bcx.tcx(), &f.sig.output()) + } _ => panic!("expected bare_fn in trans_intrinsic_call") }; let foreign_item = tcx.map.expect_foreign_item(node); diff --git a/src/librustc_trans/trans/meth.rs b/src/librustc_trans/trans/meth.rs index f6d69959dadf9..c618889f3f5dc 100644 --- a/src/librustc_trans/trans/meth.rs +++ b/src/librustc_trans/trans/meth.rs @@ -77,7 +77,7 @@ pub fn trans_impl(ccx: &CrateContext, match *impl_item { ast::MethodImplItem(ref method) => { if method.pe_generics().ty_params.len() == 0u { - let trans_everywhere = attr::requests_inline(method.attrs[]); + let trans_everywhere = attr::requests_inline(method.attrs.index(&FullRange)); for (ref ccx, is_origin) in ccx.maybe_iter(trans_everywhere) { let llfn = get_item_val(ccx, method.id); trans_fn(ccx, @@ -135,7 +135,7 @@ pub fn trans_method_callee<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, }) => { let trait_ref = ty::Binder(bcx.monomorphize(trait_ref)); let span = bcx.tcx().map.span(method_call.expr_id); - 
debug!("method_call={} trait_ref={}", + debug!("method_call={:?} trait_ref={}", method_call, trait_ref.repr(bcx.tcx())); let origin = fulfill_obligation(bcx.ccx(), @@ -177,7 +177,7 @@ pub fn trans_static_method_callee<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, let _icx = push_ctxt("meth::trans_static_method_callee"); let tcx = ccx.tcx(); - debug!("trans_static_method_callee(method_id={}, trait_id={}, \ + debug!("trans_static_method_callee(method_id={:?}, trait_id={}, \ expr_id={})", method_id, ty::item_path_str(tcx, trait_id), @@ -201,7 +201,7 @@ pub fn trans_static_method_callee<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, } else { csearch::get_item_path(tcx, method_id).last().unwrap().name() }; - debug!("trans_static_method_callee: method_id={}, expr_id={}, \ + debug!("trans_static_method_callee: method_id={:?}, expr_id={}, \ name={}", method_id, expr_id, token::get_name(mname)); // Find the substitutions for the fn itself. This includes @@ -229,7 +229,7 @@ pub fn trans_static_method_callee<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, // Here, in this call, which I've written with explicit UFCS // notation, the set of type parameters will be: // - // rcvr_type: [] <-- nothing declared on the trait itself + // rcvr_type: .index(&FullRange) <-- nothing declared on the trait itself // rcvr_self: [Vec] <-- the self type // rcvr_method: [String] <-- method type parameter // @@ -268,11 +268,11 @@ pub fn trans_static_method_callee<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, // // Recall that we matched ` as Convert>`. Trait // resolution will have given us a substitution - // containing `impl_substs=[[T=int],[],[]]` (the type + // containing `impl_substs=[[T=int],.index(&FullRange),.index(&FullRange)]` (the type // parameters defined on the impl). We combine // that with the `rcvr_method` from before, which tells us // the type parameters from the *method*, to yield - // `callee_substs=[[T=int],[],[U=String]]`. + // `callee_substs=[[T=int],.index(&FullRange),[U=String]]`. let subst::SeparateVecsPerParamSpace { types: impl_type, selfs: impl_self, @@ -290,7 +290,7 @@ pub fn trans_static_method_callee<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, } _ => { tcx.sess.bug(format!("static call to invalid vtable: {}", - vtbl.repr(tcx))[]); + vtbl.repr(tcx)).index(&FullRange)); } } } @@ -378,7 +378,7 @@ fn trans_monomorphized_callee<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, traits::VtableParam(..) => { bcx.sess().bug( format!("resolved vtable bad vtable {} in trans", - vtable.repr(bcx.tcx()))[]); + vtable.repr(bcx.tcx())).index(&FullRange)); } } } @@ -477,13 +477,19 @@ pub fn trans_trait_callee_from_llval<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, // Load the function from the vtable and cast it to the expected type. debug!("(translating trait callee) loading method"); + // Replace the self type (&Self or Box) with an opaque pointer. let llcallee_ty = match callee_ty.sty { ty::ty_bare_fn(_, ref f) if f.abi == Rust || f.abi == RustCall => { + let fake_sig = + ty::Binder(ty::FnSig { + inputs: f.sig.0.inputs.slice_from(1).to_vec(), + output: f.sig.0.output, + variadic: f.sig.0.variadic, + }); type_of_rust_fn(ccx, Some(Type::i8p(ccx)), - f.sig.0.inputs.slice_from(1), - f.sig.0.output, + &fake_sig, f.abi) } _ => { @@ -557,7 +563,8 @@ pub fn trans_object_shim<'a, 'tcx>( // Upcast to the trait in question and extract out the substitutions. 
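Several hunks above also move `debug!` arguments such as `method_call` and `method_id` from `{}` to `{:?}`. That follows the formatter split this series tracks: `{}` is reserved for types with a user-facing display implementation, while `{:?}` goes through the derivable debug implementation. In today's standard library the two traits are `Display` and `Debug`; a minimal sketch, with `MethodCall` as a made-up stand-in type:

    #[derive(Debug)]
    struct MethodCall {
        expr_id: u32,
    }

    fn main() {
        let call = MethodCall { expr_id: 42 };
        // println!("{}", call); // would require a hand-written Display impl
        println!("{:?}", call);  // works via the derived Debug impl
    }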
let upcast_trait_ref = traits::upcast(ccx.tcx(), object_trait_ref.clone(), trait_id).unwrap(); - let object_substs = upcast_trait_ref.substs().clone().erase_regions(); + let upcast_trait_ref = ty::erase_late_bound_regions(tcx, &upcast_trait_ref); + let object_substs = upcast_trait_ref.substs.clone().erase_regions(); debug!("trans_object_shim: object_substs={}", object_substs.repr(tcx)); // Lookup the type of this method as deeclared in the trait and apply substitutions. @@ -579,6 +586,8 @@ pub fn trans_object_shim<'a, 'tcx>( let llfn = decl_internal_rust_fn(ccx, method_bare_fn_ty, function_name.as_slice()); + let sig = ty::erase_late_bound_regions(ccx.tcx(), &fty.sig); + // let block_arena = TypedArena::new(); let empty_substs = Substs::trans_empty(); @@ -586,11 +595,11 @@ pub fn trans_object_shim<'a, 'tcx>( llfn, ast::DUMMY_NODE_ID, false, - fty.sig.0.output, + sig.output, &empty_substs, None, &block_arena); - let mut bcx = init_function(&fcx, false, fty.sig.0.output); + let mut bcx = init_function(&fcx, false, sig.output); // the first argument (`self`) will be a trait object let llobject = get_param(fcx.llfn, fcx.arg_pos(0) as u32); @@ -603,18 +612,18 @@ pub fn trans_object_shim<'a, 'tcx>( match fty.abi { RustCall => { // unpack the tuple to extract the input type arguments: - match fty.sig.0.inputs[1].sty { + match sig.inputs[1].sty { ty::ty_tup(ref tys) => tys.as_slice(), _ => { bcx.sess().bug( format!("rust-call expects a tuple not {}", - fty.sig.0.inputs[1].repr(tcx)).as_slice()); + sig.inputs[1].repr(tcx)).as_slice()); } } } _ => { // skip the self parameter: - fty.sig.0.inputs.slice_from(1) + sig.inputs.slice_from(1) } }; @@ -631,9 +640,12 @@ pub fn trans_object_shim<'a, 'tcx>( assert!(!fcx.needs_ret_allocas); + let sig = + ty::erase_late_bound_regions(bcx.tcx(), &fty.sig); + let dest = fcx.llretslotptr.get().map( - |_| expr::SaveIn(fcx.get_ret_slot(bcx, fty.sig.0.output, "ret_slot"))); + |_| expr::SaveIn(fcx.get_ret_slot(bcx, sig.output, "ret_slot"))); let method_offset_in_vtable = traits::get_vtable_index_of_object_method(bcx.tcx(), @@ -653,7 +665,7 @@ pub fn trans_object_shim<'a, 'tcx>( ArgVals(llargs.as_slice()), dest).bcx; - finish_fn(&fcx, bcx, fty.sig.0.output); + finish_fn(&fcx, bcx, sig.output); llfn } @@ -728,7 +740,7 @@ pub fn get_vtable<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, bcx.sess().bug( format!("resolved vtable for {} to bad vtable {} in trans", trait_ref.repr(bcx.tcx()), - vtable.repr(bcx.tcx()))[]); + vtable.repr(bcx.tcx())).index(&FullRange)); } } }); @@ -760,7 +772,7 @@ pub fn make_vtable>(ccx: &CrateContext, let components: Vec<_> = head.into_iter().chain(ptrs).collect(); unsafe { - let tbl = C_struct(ccx, components[], false); + let tbl = C_struct(ccx, components.index(&FullRange), false); let sym = token::gensym("vtable"); let buf = CString::from_vec(format!("vtable{}", sym.uint()).into_bytes()); let vt_gvar = llvm::LLVMAddGlobal(ccx.llmod(), val_ty(tbl).to_ref(), diff --git a/src/librustc_trans/trans/mod.rs b/src/librustc_trans/trans/mod.rs index fa9cd5a698bbe..4213478954695 100644 --- a/src/librustc_trans/trans/mod.rs +++ b/src/librustc_trans/trans/mod.rs @@ -16,8 +16,7 @@ pub use self::base::trans_crate; pub use self::context::CrateContext; pub use self::common::gensym_name; -#[cfg_attr(stage0, macro_escape)] -#[cfg_attr(not(stage0), macro_use)] +#[macro_use] mod macros; mod doc; diff --git a/src/librustc_trans/trans/monomorphize.rs b/src/librustc_trans/trans/monomorphize.rs index e6db462a342da..e2594765f4fda 100644 --- 
a/src/librustc_trans/trans/monomorphize.rs +++ b/src/librustc_trans/trans/monomorphize.rs @@ -42,7 +42,7 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, debug!("monomorphic_fn(\ fn_id={}, \ real_substs={}, \ - ref_id={})", + ref_id={:?})", fn_id.repr(ccx.tcx()), psubsts.repr(ccx.tcx()), ref_id); @@ -73,7 +73,7 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, debug!("monomorphic_fn(\ fn_id={}, \ psubsts={}, \ - hash_id={})", + hash_id={:?})", fn_id.repr(ccx.tcx()), psubsts.repr(ccx.tcx()), hash_id); @@ -83,7 +83,7 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, ccx.sess(), ccx.tcx().map.find(fn_id.node), || { - format!("while monomorphizing {}, couldn't find it in \ + format!("while monomorphizing {:?}, couldn't find it in \ the item map (may have attempted to monomorphize \ an item defined in a different crate?)", fn_id) @@ -131,7 +131,7 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, hash = format!("h{}", state.result()); ccx.tcx().map.with_path(fn_id.node, |path| { - exported_name(path, hash[]) + exported_name(path, hash.index(&FullRange)) }) }; @@ -141,9 +141,9 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, let mut hash_id = Some(hash_id); let mut mk_lldecl = |&mut : abi: abi::Abi| { let lldecl = if abi != abi::Rust { - foreign::decl_rust_fn_with_foreign_abi(ccx, mono_ty, s[]) + foreign::decl_rust_fn_with_foreign_abi(ccx, mono_ty, s.index(&FullRange)) } else { - decl_internal_rust_fn(ccx, mono_ty, s[]) + decl_internal_rust_fn(ccx, mono_ty, s.index(&FullRange)) }; ccx.monomorphized().borrow_mut().insert(hash_id.take().unwrap(), lldecl); @@ -177,12 +177,12 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, .. } => { let d = mk_lldecl(abi); - let needs_body = setup_lldecl(d, i.attrs[]); + let needs_body = setup_lldecl(d, i.attrs.index(&FullRange)); if needs_body { if abi != abi::Rust { foreign::trans_rust_fn_with_foreign_abi( ccx, &**decl, &**body, &[], d, psubsts, fn_id.node, - Some(hash[])); + Some(hash.index(&FullRange))); } else { trans_fn(ccx, &**decl, &**body, d, psubsts, fn_id.node, &[]); } @@ -206,7 +206,7 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, trans_enum_variant(ccx, parent, &*v, - args[], + args.index(&FullRange), this_tv.disr_val, psubsts, d); @@ -220,7 +220,7 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, match *ii { ast::MethodImplItem(ref mth) => { let d = mk_lldecl(abi::Rust); - let needs_body = setup_lldecl(d, mth.attrs[]); + let needs_body = setup_lldecl(d, mth.attrs.index(&FullRange)); if needs_body { trans_fn(ccx, mth.pe_fn_decl(), @@ -241,7 +241,7 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, match *method { ast::ProvidedMethod(ref mth) => { let d = mk_lldecl(abi::Rust); - let needs_body = setup_lldecl(d, mth.attrs[]); + let needs_body = setup_lldecl(d, mth.attrs.index(&FullRange)); if needs_body { trans_fn(ccx, mth.pe_fn_decl(), mth.pe_body(), d, psubsts, mth.id, &[]); @@ -249,8 +249,8 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, d } _ => { - ccx.sess().bug(format!("can't monomorphize a {}", - map_node)[]) + ccx.sess().bug(format!("can't monomorphize a {:?}", + map_node).index(&FullRange)) } } } @@ -258,7 +258,7 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, let d = mk_lldecl(abi::Rust); set_inline_hint(d); base::trans_tuple_struct(ccx, - struct_def.fields[], + struct_def.fields.index(&FullRange), struct_def.ctor_id.expect("ast-mapped tuple struct \ didn't have a ctor 
id"), psubsts, @@ -275,8 +275,8 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, ast_map::NodeBlock(..) | ast_map::NodePat(..) | ast_map::NodeLocal(..) => { - ccx.sess().bug(format!("can't monomorphize a {}", - map_node)[]) + ccx.sess().bug(format!("can't monomorphize a {:?}", + map_node).index(&FullRange)) } }; diff --git a/src/librustc_trans/trans/type_.rs b/src/librustc_trans/trans/type_.rs index 3785c2fb9bc54..66e27ed1188f3 100644 --- a/src/librustc_trans/trans/type_.rs +++ b/src/librustc_trans/trans/type_.rs @@ -103,7 +103,7 @@ impl Type { } pub fn int(ccx: &CrateContext) -> Type { - match ccx.tcx().sess.target.target.target_word_size[] { + match ccx.tcx().sess.target.target.target_word_size.index(&FullRange) { "32" => Type::i32(ccx), "64" => Type::i64(ccx), tws => panic!("Unsupported target word size for int: {}", tws), @@ -112,7 +112,7 @@ impl Type { pub fn int_from_ty(ccx: &CrateContext, t: ast::IntTy) -> Type { match t { - ast::TyI => ccx.int_type(), + ast::TyIs => ccx.int_type(), ast::TyI8 => Type::i8(ccx), ast::TyI16 => Type::i16(ccx), ast::TyI32 => Type::i32(ccx), @@ -122,7 +122,7 @@ impl Type { pub fn uint_from_ty(ccx: &CrateContext, t: ast::UintTy) -> Type { match t { - ast::TyU => ccx.int_type(), + ast::TyUs => ccx.int_type(), ast::TyU8 => Type::i8(ccx), ast::TyU16 => Type::i16(ccx), ast::TyU32 => Type::i32(ccx), diff --git a/src/librustc_trans/trans/type_of.rs b/src/librustc_trans/trans/type_of.rs index 3e499ea8498fb..19d50cdd48320 100644 --- a/src/librustc_trans/trans/type_of.rs +++ b/src/librustc_trans/trans/type_of.rs @@ -17,7 +17,7 @@ use trans::adt; use trans::common::*; use trans::foreign; use trans::machine; -use middle::ty::{self, Ty}; +use middle::ty::{self, RegionEscape, Ty}; use util::ppaux; use util::ppaux::Repr; @@ -99,18 +99,21 @@ pub fn untuple_arguments_if_necessary<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, pub fn type_of_rust_fn<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, llenvironment_type: Option, - inputs: &[Ty<'tcx>], - output: ty::FnOutput<'tcx>, + sig: &ty::Binder>, abi: abi::Abi) - -> Type { + -> Type +{ + let sig = ty::erase_late_bound_regions(cx.tcx(), sig); + assert!(!sig.variadic); // rust fns are never variadic + let mut atys: Vec = Vec::new(); // First, munge the inputs, if this has the `rust-call` ABI. - let inputs = untuple_arguments_if_necessary(cx, inputs, abi); + let inputs = untuple_arguments_if_necessary(cx, sig.inputs.as_slice(), abi); // Arg 0: Output pointer. 
// (if the output type is non-immediate) - let lloutputtype = match output { + let lloutputtype = match sig.output { ty::FnConverging(output) => { let use_out_pointer = return_uses_outptr(cx, output); let lloutputtype = arg_type_of(cx, output); @@ -137,7 +140,7 @@ pub fn type_of_rust_fn<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, let input_tys = inputs.iter().map(|&arg_ty| type_of_explicit_arg(cx, arg_ty)); atys.extend(input_tys); - Type::func(atys[], &lloutputtype) + Type::func(atys.index(&FullRange), &lloutputtype) } // Given a function type and a count of ty params, construct an llvm type @@ -147,11 +150,7 @@ pub fn type_of_fn_from_ty<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, fty: Ty<'tcx>) // FIXME(#19925) once fn item types are // zero-sized, we'll need to do something here if f.abi == abi::Rust || f.abi == abi::RustCall { - type_of_rust_fn(cx, - None, - f.sig.0.inputs.as_slice(), - f.sig.0.output, - f.abi) + type_of_rust_fn(cx, None, &f.sig, f.abi) } else { foreign::lltype_for_foreign_fn(cx, fty) } @@ -182,7 +181,7 @@ pub fn sizing_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> Typ let llsizingty = match t.sty { _ if !lltype_is_sized(cx.tcx(), t) => { cx.sess().bug(format!("trying to take the sizing type of {}, an unsized type", - ppaux::ty_to_string(cx.tcx(), t))[]) + ppaux::ty_to_string(cx.tcx(), t)).index(&FullRange)) } ty::ty_bool => Type::bool(cx), @@ -235,7 +234,7 @@ pub fn sizing_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> Typ ty::ty_projection(..) | ty::ty_infer(..) | ty::ty_param(..) | ty::ty_err(..) => { cx.sess().bug(format!("fictitious type {} in sizing_type_of()", - ppaux::ty_to_string(cx.tcx(), t))[]) + ppaux::ty_to_string(cx.tcx(), t)).index(&FullRange)) } ty::ty_vec(_, None) | ty::ty_trait(..) | ty::ty_str => panic!("unreachable") }; @@ -264,7 +263,7 @@ pub fn type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> Type { } match unsized_part_of_type(cx.tcx(), t).sty { - ty::ty_str | ty::ty_vec(..) => Type::uint_from_ty(cx, ast::TyU), + ty::ty_str | ty::ty_vec(..) => Type::uint_from_ty(cx, ast::TyUs), ty::ty_trait(_) => Type::vtable_ptr(cx), _ => panic!("Unexpected type returned from unsized_part_of_type : {}", t.repr(cx.tcx())) @@ -277,18 +276,20 @@ pub fn type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> Type { None => () } - debug!("type_of {} {}", t.repr(cx.tcx()), t.sty); + debug!("type_of {} {:?}", t.repr(cx.tcx()), t.sty); + + assert!(!t.has_escaping_regions()); // Replace any typedef'd types with their equivalent non-typedef // type. This ensures that all LLVM nominal types that contain // Rust types are defined as the same LLVM types. If we don't do // this then, e.g. `Option<{myfield: bool}>` would be a different // type than `Option`. - let t_norm = ty::normalize_ty(cx.tcx(), t); + let t_norm = normalize_ty(cx.tcx(), t); if t != t_norm { let llty = type_of(cx, t_norm); - debug!("--> normalized {} {} to {} {} llty={}", + debug!("--> normalized {} {:?} to {} {:?} llty={}", t.repr(cx.tcx()), t, t_norm.repr(cx.tcx()), @@ -312,7 +313,7 @@ pub fn type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> Type { let repr = adt::represent_type(cx, t); let tps = substs.types.get_slice(subst::TypeSpace); let name = llvm_type_name(cx, an_enum, did, tps); - adt::incomplete_type_of(cx, &*repr, name[]) + adt::incomplete_type_of(cx, &*repr, name.index(&FullRange)) } ty::ty_unboxed_closure(did, _, ref substs) => { // Only create the named struct, but don't fill it in. 
We @@ -323,7 +324,7 @@ pub fn type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> Type { // contents of the VecPerParamSpace to to construct the llvm // name let name = llvm_type_name(cx, an_unboxed_closure, did, substs.types.as_slice()); - adt::incomplete_type_of(cx, &*repr, name[]) + adt::incomplete_type_of(cx, &*repr, name.index(&FullRange)) } ty::ty_uniq(ty) | ty::ty_rptr(_, ty::mt{ty, ..}) | ty::ty_ptr(ty::mt{ty, ..}) => { @@ -379,7 +380,7 @@ pub fn type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> Type { let repr = adt::represent_type(cx, t); let tps = substs.types.get_slice(subst::TypeSpace); let name = llvm_type_name(cx, a_struct, did, tps); - adt::incomplete_type_of(cx, &*repr, name[]) + adt::incomplete_type_of(cx, &*repr, name.index(&FullRange)) } } @@ -398,7 +399,7 @@ pub fn type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> Type { } ty::ty_trait(..) => Type::opaque_trait(cx), _ => cx.sess().bug(format!("ty_open with sized type: {}", - ppaux::ty_to_string(cx.tcx(), t))[]) + ppaux::ty_to_string(cx.tcx(), t)).index(&FullRange)) }, ty::ty_infer(..) => cx.sess().bug("type_of with ty_infer"), @@ -407,7 +408,7 @@ pub fn type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> Type { ty::ty_err(..) => cx.sess().bug("type_of with ty_err"), }; - debug!("--> mapped t={} {} to llty={}", + debug!("--> mapped t={} {:?} to llty={}", t.repr(cx.tcx()), t, cx.tn().type_to_string(llty)); @@ -457,7 +458,7 @@ pub fn llvm_type_name<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, let tstr = if strings.is_empty() { base } else { - format!("{}<{}>", base, strings) + format!("{}<{:?}>", base, strings) }; if did.krate == 0 { diff --git a/src/librustc_typeck/astconv.rs b/src/librustc_typeck/astconv.rs index 183b8c5c830ef..265ebe00d539f 100644 --- a/src/librustc_typeck/astconv.rs +++ b/src/librustc_typeck/astconv.rs @@ -197,7 +197,7 @@ pub fn opt_ast_region_to_region<'tcx>( help_name } else { format!("one of {}'s {} elided lifetimes", help_name, n) - }[]); + }.index(&FullRange)); if len == 2 && i == 0 { m.push_str(" or "); @@ -347,7 +347,7 @@ fn create_substs_for_ast_path<'tcx>( format!("wrong number of type arguments: {} {}, found {}", expected, required_ty_param_count, - supplied_ty_param_count)[]); + supplied_ty_param_count).index(&FullRange)); } else if supplied_ty_param_count > formal_ty_param_count { let expected = if required_ty_param_count < formal_ty_param_count { "expected at most" @@ -358,7 +358,7 @@ fn create_substs_for_ast_path<'tcx>( format!("wrong number of type arguments: {} {}, found {}", expected, formal_ty_param_count, - supplied_ty_param_count)[]); + supplied_ty_param_count).index(&FullRange)); } let mut substs = Substs::new_type(types, regions); @@ -377,7 +377,7 @@ fn create_substs_for_ast_path<'tcx>( } } - for param in ty_param_defs[supplied_ty_param_count..].iter() { + for param in ty_param_defs.index(&(supplied_ty_param_count..)).iter() { match param.default { Some(default) => { // This is a default type parameter. 
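Several hunks above (trans_object_shim in meth.rs, type_of_rust_fn in type_of.rs) now call ty::erase_late_bound_regions before computing LLVM types, since LLVM types never distinguish lifetimes. As a rough sketch of what a late-bound lifetime is at the language level, written in present-day syntax rather than the pre-1.0 syntax this patch targets (pick_first is a made-up name, nothing below is taken from the patch):

fn pick_first<'a>(x: &'a u32, _y: &'a u32) -> &'a u32 {
    x
}

fn main() {
    // The annotated type is higher-ranked: `'a` lives under the `for<'a>`
    // binder on the fn pointer, i.e. it is "late bound". Once lowered to an
    // LLVM type the lifetime carries no information, which is why trans can
    // erase the binder up front before building the function's signature.
    let f: for<'a> fn(&'a u32, &'a u32) -> &'a u32 = pick_first;
    println!("{}", f(&1, &2));
}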
@@ -556,7 +556,8 @@ pub fn instantiate_trait_ref<'tcx>( _ => { this.tcx().sess.span_fatal( ast_trait_ref.path.span, - format!("`{}` is not a trait", ast_trait_ref.path.user_string(this.tcx()))[]); + format!("`{}` is not a trait", + ast_trait_ref.path.user_string(this.tcx())).index(&FullRange)); } } } @@ -570,7 +571,7 @@ fn ast_path_to_trait_ref<'a,'tcx>( mut projections: Option<&mut Vec>>) -> Rc> { - debug!("ast_path_to_trait_ref {}", path); + debug!("ast_path_to_trait_ref {:?}", path); let trait_def = this.get_trait_def(trait_def_id); // the trait reference introduces a binding level here, so @@ -825,7 +826,7 @@ pub fn ast_ty_to_builtin_ty<'tcx>( .sess .span_bug(ast_ty.span, format!("unbound path {}", - path.repr(this.tcx()))[]) + path.repr(this.tcx())).index(&FullRange)) } Some(&d) => d }; @@ -847,7 +848,7 @@ pub fn ast_ty_to_builtin_ty<'tcx>( this.tcx().sess.span_bug( path.span, format!("converting `Box` to `{}`", - ty.repr(this.tcx()))[]); + ty.repr(this.tcx())).index(&FullRange)); } } } @@ -1067,10 +1068,14 @@ pub fn ast_ty_to_ty<'tcx>( ty::mk_vec(tcx, ast_ty_to_ty(this, rscope, &**ty), None) } ast::TyObjectSum(ref ty, ref bounds) => { - match ast_ty_to_trait_ref(this, rscope, &**ty, bounds[]) { + match ast_ty_to_trait_ref(this, rscope, &**ty, bounds.index(&FullRange)) { Ok((trait_ref, projection_bounds)) => { - trait_ref_to_object_type(this, rscope, ast_ty.span, - trait_ref, projection_bounds, bounds[]) + trait_ref_to_object_type(this, + rscope, + ast_ty.span, + trait_ref, + projection_bounds, + bounds.index(&FullRange)) } Err(ErrorReported) => { this.tcx().types.err @@ -1105,7 +1110,7 @@ pub fn ast_ty_to_ty<'tcx>( ty::mk_bare_fn(tcx, None, tcx.mk_bare_fn(bare_fn)) } ast::TyPolyTraitRef(ref bounds) => { - conv_ty_poly_trait_ref(this, rscope, ast_ty.span, bounds[]) + conv_ty_poly_trait_ref(this, rscope, ast_ty.span, bounds.index(&FullRange)) } ast::TyPath(ref path, id) => { let a_def = match tcx.def_map.borrow().get(&id) { @@ -1113,7 +1118,7 @@ pub fn ast_ty_to_ty<'tcx>( tcx.sess .span_bug(ast_ty.span, format!("unbound path {}", - path.repr(tcx))[]) + path.repr(tcx)).index(&FullRange)) } Some(&d) => d }; @@ -1152,7 +1157,7 @@ pub fn ast_ty_to_ty<'tcx>( def::DefMod(id) => { tcx.sess.span_fatal(ast_ty.span, format!("found module name used as a type: {}", - tcx.map.node_to_string(id.node))[]); + tcx.map.node_to_string(id.node)).index(&FullRange)); } def::DefPrimTy(_) => { panic!("DefPrimTy arm missed in previous ast_ty_to_prim_ty call"); @@ -1171,7 +1176,7 @@ pub fn ast_ty_to_ty<'tcx>( .last() .unwrap() .identifier) - .get())[]); + .get()).index(&FullRange)); this.tcx().types.err } def::DefAssociatedPath(provenance, assoc_ident) => { @@ -1180,8 +1185,8 @@ pub fn ast_ty_to_ty<'tcx>( _ => { tcx.sess.span_fatal(ast_ty.span, format!("found value name used \ - as a type: {}", - a_def)[]); + as a type: {:?}", + a_def).index(&FullRange)); } } } @@ -1209,7 +1214,7 @@ pub fn ast_ty_to_ty<'tcx>( ast_ty.span, format!("expected constant expr for array \ length: {}", - *r)[]); + *r).index(&FullRange)); } } } @@ -1331,7 +1336,7 @@ fn ty_of_method_or_bare_fn<'a, 'tcx>(this: &AstConv<'tcx>, let input_params = if self_ty.is_some() { decl.inputs.slice_from(1) } else { - decl.inputs[] + decl.inputs.index(&FullRange) }; let input_tys = input_params.iter().map(|a| ty_of_arg(this, &rb, a, None)); let input_pats: Vec = input_params.iter() @@ -1546,7 +1551,7 @@ fn conv_ty_poly_trait_ref<'tcx>( ast_bounds: &[ast::TyParamBound]) -> Ty<'tcx> { - let mut partitioned_bounds = partition_bounds(this.tcx(), span, 
ast_bounds[]); + let mut partitioned_bounds = partition_bounds(this.tcx(), span, ast_bounds.index(&FullRange)); let mut projection_bounds = Vec::new(); let main_trait_bound = if !partitioned_bounds.trait_bounds.is_empty() { @@ -1596,7 +1601,7 @@ pub fn conv_existential_bounds_from_partitioned_bounds<'tcx>( this.tcx().sess.span_err( b.trait_ref.path.span, format!("only the builtin traits can be used \ - as closure or object bounds")[]); + as closure or object bounds").index(&FullRange)); } let region_bound = compute_region_bound(this, @@ -1626,7 +1631,7 @@ fn compute_opt_region_bound<'tcx>(tcx: &ty::ctxt<'tcx>, builtin_bounds: ty::BuiltinBounds) -> Option { - debug!("compute_opt_region_bound(explicit_region_bounds={}, \ + debug!("compute_opt_region_bound(explicit_region_bounds={:?}, \ principal_trait_ref={}, builtin_bounds={})", explicit_region_bounds, principal_trait_ref.repr(tcx), @@ -1669,7 +1674,7 @@ fn compute_opt_region_bound<'tcx>(tcx: &ty::ctxt<'tcx>, tcx.sess.span_err( span, format!("ambiguous lifetime bound, \ - explicit lifetime bound required")[]); + explicit lifetime bound required").index(&FullRange)); } return Some(r); } @@ -1695,7 +1700,7 @@ fn compute_region_bound<'tcx>( None => { this.tcx().sess.span_err( span, - format!("explicit lifetime bound required")[]); + format!("explicit lifetime bound required").index(&FullRange)); ty::ReStatic } } diff --git a/src/librustc_typeck/check/_match.rs b/src/librustc_typeck/check/_match.rs index 80e7e70605956..363ce459b3fe9 100644 --- a/src/librustc_typeck/check/_match.rs +++ b/src/librustc_typeck/check/_match.rs @@ -195,7 +195,7 @@ pub fn check_pat<'a, 'tcx>(pcx: &pat_ctxt<'a, 'tcx>, ast::PatRegion(ref inner, mutbl) => { let inner_ty = fcx.infcx().next_ty_var(); - // SNAP b2085d9 remove this `if`-`else` entirely after next snapshot + // SNAP 340ac04 remove this `if`-`else` entirely after next snapshot let mutbl = if mutbl == ast::MutImmutable { ty::deref(fcx.infcx().shallow_resolve(expected), true) .map(|mt| mt.mutbl).unwrap_or(ast::MutImmutable) @@ -505,9 +505,10 @@ pub fn check_pat_enum<'a, 'tcx>(pcx: &pat_ctxt<'a, 'tcx>, pat: &ast::Pat, let ctor_scheme = ty::lookup_item_type(tcx, enum_def); let path_scheme = if ty::is_fn_ty(ctor_scheme.ty) { + let fn_ret = ty::assert_no_late_bound_regions(tcx, &ty::ty_fn_ret(ctor_scheme.ty)); ty::TypeScheme { - ty: ty::ty_fn_ret(ctor_scheme.ty).unwrap(), - ..ctor_scheme + ty: fn_ret.unwrap(), + generics: ctor_scheme.generics, } } else { ctor_scheme @@ -603,7 +604,7 @@ pub fn check_struct_pat_fields<'a, 'tcx>(pcx: &pat_ctxt<'a, 'tcx>, // Typecheck each field. for &Spanned { node: ref field, span } in fields.iter() { - let field_type = match used_fields.entry(&field.ident.name) { + let field_type = match used_fields.entry(field.ident.name) { Occupied(occupied) => { span_err!(tcx.sess, span, E0025, "field `{}` bound multiple times in the pattern", diff --git a/src/librustc_typeck/check/callee.rs b/src/librustc_typeck/check/callee.rs index b1dc033b567bb..dff216ac2935f 100644 --- a/src/librustc_typeck/check/callee.rs +++ b/src/librustc_typeck/check/callee.rs @@ -195,7 +195,7 @@ fn confirm_builtin_call<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>, let arg_exprs: Vec<_> = arg_exprs.iter().collect(); // for some weird reason we take &[&P<...>]. 
check_argument_types(fcx, call_expr.span, - fn_sig.inputs[], + fn_sig.inputs.as_slice(), arg_exprs.as_slice(), AutorefArgs::No, fn_sig.variadic, diff --git a/src/librustc_typeck/check/closure.rs b/src/librustc_typeck/check/closure.rs index 9945e264bfc2a..0079115756993 100644 --- a/src/librustc_typeck/check/closure.rs +++ b/src/librustc_typeck/check/closure.rs @@ -82,7 +82,7 @@ fn check_unboxed_closure<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>, expected_sig: Option>) { let expr_def_id = ast_util::local_def(expr.id); - debug!("check_unboxed_closure kind={} expected_sig={}", + debug!("check_unboxed_closure kind={:?} expected_sig={}", kind, expected_sig.repr(fcx.tcx())); @@ -134,7 +134,7 @@ fn check_unboxed_closure<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>, // the `unboxed_closures` table. fn_ty.sig.0.inputs = vec![ty::mk_tup(fcx.tcx(), fn_ty.sig.0.inputs)]; - debug!("unboxed_closure for {} --> sig={} kind={}", + debug!("unboxed_closure for {} --> sig={} kind={:?}", expr_def_id.repr(fcx.tcx()), fn_ty.sig.repr(fcx.tcx()), kind); @@ -186,7 +186,7 @@ fn deduce_unboxed_closure_expectations_from_trait_ref<'a,'tcx>( None => { return None; } }; - debug!("found object type {}", kind); + debug!("found object type {:?}", kind); let arg_param_ty = *trait_ref.substs().types.get(subst::TypeSpace, 0); let arg_param_ty = fcx.infcx().resolve_type_vars_if_possible(&arg_param_ty); diff --git a/src/librustc_typeck/check/method/confirm.rs b/src/librustc_typeck/check/method/confirm.rs index adea5084aab2b..2471bc85bc938 100644 --- a/src/librustc_typeck/check/method/confirm.rs +++ b/src/librustc_typeck/check/method/confirm.rs @@ -200,7 +200,7 @@ impl<'a,'tcx> ConfirmContext<'a,'tcx> { match pick.kind { probe::InherentImplPick(impl_def_id) => { assert!(ty::impl_trait_ref(self.tcx(), impl_def_id).is_none(), - "impl {} is not an inherent impl", impl_def_id); + "impl {:?} is not an inherent impl", impl_def_id); let impl_polytype = check::impl_self_ty(self.fcx, self.span, impl_def_id); (impl_polytype.substs, MethodStatic(pick.method_ty.def_id)) @@ -315,7 +315,7 @@ impl<'a,'tcx> ConfirmContext<'a,'tcx> { self.tcx().sess.span_bug( self.span, format!("self-type `{}` for ObjectPick never dereferenced to an object", - self_ty.repr(self.tcx()))[]) + self_ty.repr(self.tcx())).index(&FullRange)) } } } @@ -370,7 +370,7 @@ impl<'a,'tcx> ConfirmContext<'a,'tcx> { format!( "{} was a subtype of {} but now is not?", self_ty.repr(self.tcx()), - method_self_ty.repr(self.tcx()))[]); + method_self_ty.repr(self.tcx())).index(&FullRange)); } } } @@ -562,6 +562,7 @@ impl<'a,'tcx> ConfirmContext<'a,'tcx> { self.fcx.adjust_expr_ty( &**base_expr, Some(&ty::AdjustDerefRef(base_adjustment.clone()))); + let index_expr_ty = self.fcx.expr_ty(&**index_expr); let result = check::try_index_step( self.fcx, @@ -570,10 +571,10 @@ impl<'a,'tcx> ConfirmContext<'a,'tcx> { &**base_expr, adjusted_base_ty, base_adjustment, - PreferMutLvalue); + PreferMutLvalue, + index_expr_ty); if let Some((input_ty, return_ty)) = result { - let index_expr_ty = self.fcx.expr_ty(&**index_expr); demand::suptype(self.fcx, index_expr.span, input_ty, index_expr_ty); let expr_ty = self.fcx.expr_ty(&**expr); @@ -639,7 +640,7 @@ impl<'a,'tcx> ConfirmContext<'a,'tcx> { self.span, format!("cannot upcast `{}` to `{}`", source_trait_ref.repr(self.tcx()), - target_trait_def_id.repr(self.tcx()))[]); + target_trait_def_id.repr(self.tcx())).as_slice()); } } } diff --git a/src/librustc_typeck/check/method/mod.rs b/src/librustc_typeck/check/method/mod.rs index ad43dd84ef6b2..87ea082b6b20b 100644 --- 
a/src/librustc_typeck/check/method/mod.rs +++ b/src/librustc_typeck/check/method/mod.rs @@ -231,7 +231,7 @@ pub fn lookup_in_trait_adjusted<'a, 'tcx>(fcx: &'a FnCtxt<'a, 'tcx>, Some(self_expr) => { debug!("lookup_in_trait_adjusted: inserting adjustment if needed \ - (self-id={}, base adjustment={}, explicit_self={})", + (self-id={}, base adjustment={:?}, explicit_self={:?})", self_expr.id, autoderefref, method_ty.explicit_self); match method_ty.explicit_self { @@ -266,7 +266,7 @@ pub fn lookup_in_trait_adjusted<'a, 'tcx>(fcx: &'a FnCtxt<'a, 'tcx>, span, format!( "trait method is &self but first arg is: {}", - transformed_self_ty.repr(fcx.tcx()))[]); + transformed_self_ty.repr(fcx.tcx())).index(&FullRange)); } } } @@ -275,8 +275,8 @@ pub fn lookup_in_trait_adjusted<'a, 'tcx>(fcx: &'a FnCtxt<'a, 'tcx>, fcx.tcx().sess.span_bug( span, format!( - "unexpected explicit self type in operator method: {}", - method_ty.explicit_self)[]); + "unexpected explicit self type in operator method: {:?}", + method_ty.explicit_self).index(&FullRange)); } } } @@ -330,7 +330,7 @@ pub fn report_error<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, if is_field { cx.sess.span_note(span, format!("use `(s.{0})(...)` if you meant to call the \ - function stored in the `{0}` field", method_ustring)[]); + function stored in the `{0}` field", method_ustring).index(&FullRange)); } if static_sources.len() > 0 { diff --git a/src/librustc_typeck/check/method/probe.rs b/src/librustc_typeck/check/method/probe.rs index 115711ae92b29..4ba161fa83563 100644 --- a/src/librustc_typeck/check/method/probe.rs +++ b/src/librustc_typeck/check/method/probe.rs @@ -267,7 +267,7 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> { return; // already visited } - debug!("assemble_inherent_impl_probe {}", impl_def_id); + debug!("assemble_inherent_impl_probe {:?}", impl_def_id); let method = match impl_method(self.tcx(), impl_def_id, self.method_name) { Some(m) => m, @@ -576,7 +576,7 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> { self.tcx().sess.span_bug( self.span, format!("No entry for unboxed closure: {}", - closure_def_id.repr(self.tcx()))[]); + closure_def_id.repr(self.tcx())).index(&FullRange)); } }; @@ -745,7 +745,7 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> { debug!("pick_method(self_ty={})", self.infcx().ty_to_string(self_ty)); debug!("searching inherent candidates"); - match self.consider_candidates(self_ty, self.inherent_candidates[]) { + match self.consider_candidates(self_ty, self.inherent_candidates.index(&FullRange)) { None => {} Some(pick) => { return Some(pick); @@ -753,7 +753,7 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> { } debug!("searching extension candidates"); - self.consider_candidates(self_ty, self.extension_candidates[]) + self.consider_candidates(self_ty, self.extension_candidates.index(&FullRange)) } fn consider_candidates(&self, @@ -768,7 +768,7 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> { debug!("applicable_candidates: {}", applicable_candidates.repr(self.tcx())); if applicable_candidates.len() > 1 { - match self.collapse_candidates_to_trait_pick(applicable_candidates[]) { + match self.collapse_candidates_to_trait_pick(applicable_candidates.index(&FullRange)) { Some(pick) => { return Some(Ok(pick)); } None => { } } @@ -864,7 +864,7 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> { Some(data) => data, None => return None, }; - if probes[1..].iter().any(|p| p.to_trait_data() != Some(trait_data)) { + if probes.index(&(1..)).iter().any(|p| p.to_trait_data() != Some(trait_data)) { return None; } @@ -1024,7 +1024,7 @@ fn trait_method<'tcx>(tcx: &ty::ctxt<'tcx>, -> 
Option<(uint, Rc>)> { let trait_items = ty::trait_items(tcx, trait_def_id); - debug!("trait_method; items: {}", trait_items); + debug!("trait_method; items: {:?}", trait_items); trait_items .iter() .filter(|item| @@ -1126,7 +1126,7 @@ impl<'tcx> Repr<'tcx> for CandidateKind<'tcx> { impl<'tcx> Repr<'tcx> for CandidateStep<'tcx> { fn repr(&self, tcx: &ty::ctxt<'tcx>) -> String { - format!("CandidateStep({},{})", + format!("CandidateStep({},{:?})", self.self_ty.repr(tcx), self.adjustment) } @@ -1134,19 +1134,19 @@ impl<'tcx> Repr<'tcx> for CandidateStep<'tcx> { impl<'tcx> Repr<'tcx> for PickAdjustment { fn repr(&self, _tcx: &ty::ctxt) -> String { - format!("{}", self) + format!("{:?}", self) } } impl<'tcx> Repr<'tcx> for PickKind<'tcx> { fn repr(&self, _tcx: &ty::ctxt) -> String { - format!("{}", self) + format!("{:?}", self) } } impl<'tcx> Repr<'tcx> for Pick<'tcx> { fn repr(&self, tcx: &ty::ctxt<'tcx>) -> String { - format!("Pick(method_ty={}, adjustment={}, kind={})", + format!("Pick(method_ty={}, adjustment={:?}, kind={:?})", self.method_ty.repr(tcx), self.adjustment, self.kind) diff --git a/src/librustc_typeck/check/mod.rs b/src/librustc_typeck/check/mod.rs index 1b51434a58cc1..9563dd45ca234 100644 --- a/src/librustc_typeck/check/mod.rs +++ b/src/librustc_typeck/check/mod.rs @@ -593,7 +593,7 @@ fn check_fn<'a, 'tcx>(ccx: &'a CrateCtxt<'a, 'tcx>, let tcx = ccx.tcx; let err_count_on_creation = tcx.sess.err_count(); - let arg_tys = fn_sig.inputs[]; + let arg_tys = fn_sig.inputs.index(&FullRange); let ret_ty = fn_sig.output; debug!("check_fn(arg_tys={}, ret_ty={}, fn_id={})", @@ -691,7 +691,7 @@ pub fn check_item(ccx: &CrateCtxt, it: &ast::Item) { ast::ItemEnum(ref enum_definition, _) => { check_enum_variants(ccx, it.span, - enum_definition.variants[], + enum_definition.variants.index(&FullRange), it.id); } ast::ItemFn(ref decl, _, _, _, ref body) => { @@ -989,7 +989,7 @@ fn compare_impl_method<'tcx>(tcx: &ty::ctxt<'tcx>, but not in the trait", token::get_name(trait_m.name), ppaux::explicit_self_category_to_str( - &impl_m.explicit_self))[]); + &impl_m.explicit_self)).index(&FullRange)); return; } (_, &ty::StaticExplicitSelfCategory) => { @@ -999,7 +999,7 @@ fn compare_impl_method<'tcx>(tcx: &ty::ctxt<'tcx>, but not in the impl", token::get_name(trait_m.name), ppaux::explicit_self_category_to_str( - &trait_m.explicit_self))[]); + &trait_m.explicit_self)).index(&FullRange)); return; } _ => { @@ -1360,7 +1360,7 @@ fn compare_impl_method<'tcx>(tcx: &ty::ctxt<'tcx>, span, format!("lifetime parameters or bounds on method `{}` do \ not match the trait declaration", - token::get_name(impl_m.name))[]); + token::get_name(impl_m.name)).index(&FullRange)); return false; } @@ -1412,7 +1412,7 @@ fn compare_impl_method<'tcx>(tcx: &ty::ctxt<'tcx>, from its counterpart `{}` \ declared in the trait", impl_param.name.user_string(tcx), - trait_param.name.user_string(tcx))[]); + trait_param.name.user_string(tcx)).index(&FullRange)); true } else { false @@ -1422,14 +1422,14 @@ fn compare_impl_method<'tcx>(tcx: &ty::ctxt<'tcx>, tcx.sess.span_note( span, format!("the impl is missing the following bounds: `{}`", - missing.user_string(tcx))[]); + missing.user_string(tcx)).index(&FullRange)); } if extra.len() != 0 { tcx.sess.span_note( span, format!("the impl has the following extra bounds: `{}`", - extra.user_string(tcx))[]); + extra.user_string(tcx)).index(&FullRange)); } if err { @@ -1690,7 +1690,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } pub fn tag(&self) -> String { - format!("{}", self as *const FnCtxt) + 
format!("{:?}", self as *const FnCtxt) } pub fn local_ty(&self, span: Span, nid: ast::NodeId) -> Ty<'tcx> { @@ -1700,7 +1700,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { self.tcx().sess.span_bug( span, format!("no type for local variable {}", - nid)[]); + nid).index(&FullRange)); } } } @@ -1886,7 +1886,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { fn register_unsize_obligations(&self, span: Span, unsize: &ty::UnsizeKind<'tcx>) { - debug!("register_unsize_obligations: unsize={}", unsize); + debug!("register_unsize_obligations: unsize={:?}", unsize); match *unsize { ty::UnsizeLength(..) => {} @@ -2034,7 +2034,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { Some(&t) => t, None => { self.tcx().sess.bug(format!("no type for expr in fcx {}", - self.tag())[]); + self.tag()).index(&FullRange)); } } } @@ -2064,7 +2064,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { self.tcx().sess.bug( format!("no type for node {}: {} in fcx {}", id, self.tcx().map.node_to_string(id), - self.tag())[]); + self.tag()).index(&FullRange)); } } } @@ -2317,7 +2317,9 @@ fn make_overloaded_lvalue_return_type<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, { match method { Some(method) => { - let ref_ty = ty::ty_fn_ret(method.ty); + let ref_ty = // invoked methods have all LB regions instantiated + ty::assert_no_late_bound_regions( + fcx.tcx(), &ty::ty_fn_ret(method.ty)); match method_call { Some(method_call) => { fcx.inh.method_map.borrow_mut().insert(method_call, @@ -2377,90 +2379,6 @@ fn autoderef_for_index<'a, 'tcx, T, F>(fcx: &FnCtxt<'a, 'tcx>, } } -/// Checks for a `Slice` (or `SliceMut`) impl at the relevant level of autoderef. If it finds one, -/// installs method info and returns type of method (else None). -fn try_overloaded_slice_step<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, - method_call: MethodCall, - expr: &ast::Expr, - base_expr: &ast::Expr, - base_ty: Ty<'tcx>, // autoderef'd type - autoderefref: ty::AutoDerefRef<'tcx>, - lvalue_pref: LvaluePreference, - start_expr: &Option>, - end_expr: &Option>) - -> Option<(Ty<'tcx>, /* index type */ - Ty<'tcx>)> /* return type */ -{ - let input_ty = fcx.infcx().next_ty_var(); - let return_ty = fcx.infcx().next_ty_var(); - - let method = match lvalue_pref { - PreferMutLvalue => { - // Try `SliceMut` first, if preferred. - match fcx.tcx().lang_items.slice_mut_trait() { - Some(trait_did) => { - let method_name = match (start_expr, end_expr) { - (&Some(_), &Some(_)) => "slice_or_fail_mut", - (&Some(_), &None) => "slice_from_or_fail_mut", - (&None, &Some(_)) => "slice_to_or_fail_mut", - (&None, &None) => "as_mut_slice_", - }; - - method::lookup_in_trait_adjusted(fcx, - expr.span, - Some(&*base_expr), - token::intern(method_name), - trait_did, - autoderefref, - base_ty, - Some(vec![input_ty, return_ty])) - } - _ => None, - } - } - NoPreference => { - // Otherwise, fall back to `Slice`. 
- match fcx.tcx().lang_items.slice_trait() { - Some(trait_did) => { - let method_name = match (start_expr, end_expr) { - (&Some(_), &Some(_)) => "slice_or_fail", - (&Some(_), &None) => "slice_from_or_fail", - (&None, &Some(_)) => "slice_to_or_fail", - (&None, &None) => "as_slice_", - }; - - method::lookup_in_trait_adjusted(fcx, - expr.span, - Some(&*base_expr), - token::intern(method_name), - trait_did, - autoderefref, - base_ty, - Some(vec![input_ty, return_ty])) - } - _ => None, - } - } - }; - - // If some lookup succeeded, install method in table - method.map(|method| { - let method_ty = method.ty; - make_overloaded_lvalue_return_type(fcx, Some(method_call), Some(method)); - - let result_ty = ty::ty_fn_ret(method_ty); - let result_ty = match result_ty { - ty::FnConverging(result_ty) => result_ty, - ty::FnDiverging => { - fcx.tcx().sess.span_bug(expr.span, - "slice trait does not define a `!` return") - } - }; - - (input_ty, result_ty) - }) -} - /// To type-check `base_expr[index_expr]`, we progressively autoderef (and otherwise adjust) /// `base_expr`, looking for a type which either supports builtin indexing or overloaded indexing. /// This loop implements one step in that search; the autoderef loop is implemented by @@ -2471,29 +2389,32 @@ fn try_index_step<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, base_expr: &ast::Expr, adjusted_ty: Ty<'tcx>, adjustment: ty::AutoDerefRef<'tcx>, - lvalue_pref: LvaluePreference) + lvalue_pref: LvaluePreference, + index_ty: Ty<'tcx>) -> Option<(/*index type*/ Ty<'tcx>, /*element type*/ Ty<'tcx>)> { - debug!("try_index_step(expr={}, base_expr.id={}, adjusted_ty={}, adjustment={})", - expr.repr(fcx.tcx()), - base_expr.repr(fcx.tcx()), - adjusted_ty.repr(fcx.tcx()), - adjustment); - - // Try built-in indexing first. - match ty::index(adjusted_ty) { - Some(ty) => { + let tcx = fcx.tcx(); + debug!("try_index_step(expr={}, base_expr.id={}, adjusted_ty={}, adjustment={:?}, index_ty={})", + expr.repr(tcx), + base_expr.repr(tcx), + adjusted_ty.repr(tcx), + adjustment, + index_ty.repr(tcx)); + + let input_ty = fcx.infcx().next_ty_var(); + + // First, try built-in indexing. + match (ty::index(adjusted_ty), &index_ty.sty) { + (Some(ty), &ty::ty_uint(ast::TyUs)) | (Some(ty), &ty::ty_infer(ty::IntVar(_))) => { + debug!("try_index_step: success, using built-in indexing"); fcx.write_adjustment(base_expr.id, base_expr.span, ty::AdjustDerefRef(adjustment)); - return Some((fcx.tcx().types.uint, ty)); + return Some((tcx.types.uint, ty)); } - - None => { } + _ => {} } - let input_ty = fcx.infcx().next_ty_var(); - // Try `IndexMut` first, if preferred. - let method = match (lvalue_pref, fcx.tcx().lang_items.index_mut_trait()) { + let method = match (lvalue_pref, tcx.lang_items.index_mut_trait()) { (PreferMutLvalue, Some(trait_did)) => { method::lookup_in_trait_adjusted(fcx, expr.span, @@ -2508,14 +2429,14 @@ fn try_index_step<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, }; // Otherwise, fall back to `Index`. - let method = match (method, fcx.tcx().lang_items.index_trait()) { + let method = match (method, tcx.lang_items.index_trait()) { (None, Some(trait_did)) => { method::lookup_in_trait_adjusted(fcx, expr.span, Some(&*base_expr), token::intern("index"), trait_did, - adjustment, + adjustment.clone(), adjusted_ty, Some(vec![input_ty])) } @@ -2526,6 +2447,7 @@ fn try_index_step<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, // type from the method signature. 
// If some lookup succeeded, install method in table method.and_then(|method| { + debug!("try_index_step: success, using overloaded indexing"); make_overloaded_lvalue_return_type(fcx, Some(method_call), Some(method)). map(|ret| (input_ty, ret.ty)) }) @@ -2544,7 +2466,7 @@ fn lookup_method_for_for_loop<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, Ok(trait_did) => trait_did, Err(ref err_string) => { fcx.tcx().sess.span_err(iterator_expr.span, - err_string[]); + err_string.index(&FullRange)); return fcx.tcx().types.err } }; @@ -2571,7 +2493,7 @@ fn lookup_method_for_for_loop<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, format!("`for` loop expression has type `{}` which does \ not implement the `Iterator` trait; \ maybe try .iter()", - ty_string)[]); + ty_string).index(&FullRange)); } fcx.tcx().types.err } @@ -2609,7 +2531,7 @@ fn lookup_method_for_for_loop<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, format!("`next` method of the `Iterator` \ trait has an unexpected type `{}`", fcx.infcx().ty_to_string(return_type)) - []); + .index(&FullRange)); fcx.tcx().types.err } } @@ -2636,7 +2558,7 @@ fn check_method_argument_types<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, check_argument_types(fcx, sp, - err_inputs[], + err_inputs.index(&FullRange), args_no_rcvr, autoref_args, false, @@ -2731,7 +2653,7 @@ fn check_argument_types<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, err_args(fcx.tcx(), supplied_arg_count) }; - debug!("check_argument_types: formal_tys={}", + debug!("check_argument_types: formal_tys={:?}", formal_tys.iter().map(|t| fcx.infcx().ty_to_string(*t)).collect::>()); // Check the arguments. @@ -3088,7 +3010,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, }; // Call the generic checker. - let args: Vec<_> = args[1..].iter().map(|x| x).collect(); + let args: Vec<_> = args.index(&(1..)).iter().map(|x| x).collect(); let ret_ty = check_method_argument_types(fcx, method_name.span, fn_ty, @@ -3181,7 +3103,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, } }; - debug!("adjusted_ty={} adjustment={}", + debug!("adjusted_ty={} adjustment={:?}", adj_ty.repr(fcx.tcx()), adjustment); @@ -3406,7 +3328,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, ty::ty_struct(base_id, substs) => { debug!("struct named {}", ppaux::ty_to_string(tcx, base_t)); let fields = ty::lookup_struct_fields(tcx, base_id); - lookup_field_ty(tcx, base_id, fields[], + lookup_field_ty(tcx, base_id, fields.index(&FullRange), field.node.name, &(*substs)) } _ => None @@ -3469,7 +3391,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, if tuple_like { debug!("tuple struct named {}", ppaux::ty_to_string(tcx, base_t)); let fields = ty::lookup_struct_fields(tcx, base_id); - lookup_tup_field_ty(tcx, base_id, fields[], + lookup_tup_field_ty(tcx, base_id, fields.index(&FullRange), idx.node, &(*substs)) } else { None @@ -3634,7 +3556,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, class_id, id, fcx.ccx.tcx.mk_substs(struct_substs), - class_fields[], + class_fields.index(&FullRange), fields, base_expr.is_none(), None); @@ -3677,7 +3599,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, variant_id, id, fcx.ccx.tcx.mk_substs(substitutions), - variant_fields[], + variant_fields.index(&FullRange), fields, true, Some(enum_id)); @@ -4056,7 +3978,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, callee::check_call(fcx, expr, &**callee, args.as_slice()); } ast::ExprMethodCall(ident, ref tps, ref args) => { - check_method_call(fcx, expr, ident, args[], tps[], 
lvalue_pref); + check_method_call(fcx, expr, ident, args.as_slice(), tps.as_slice(), lvalue_pref); let arg_tys = args.iter().map(|a| fcx.expr_ty(&**a)); let args_err = arg_tys.fold(false, |rest_err, a| { @@ -4144,7 +4066,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, let expected = expected.only_has_type(); let flds = expected.map_to_option(fcx, |ty| { match ty.sty { - ty::ty_tup(ref flds) => Some(flds[]), + ty::ty_tup(ref flds) => Some(flds.index(&FullRange)), _ => None } }); @@ -4178,7 +4100,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, let struct_id = match def { Some(def::DefVariant(enum_id, variant_id, true)) => { check_struct_enum_variant(fcx, id, expr.span, enum_id, - variant_id, fields[]); + variant_id, fields.index(&FullRange)); enum_id } Some(def::DefTrait(def_id)) => { @@ -4187,7 +4109,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, pprust::path_to_string(path)); check_struct_fields_on_error(fcx, id, - fields[], + fields.index(&FullRange), base_expr); def_id }, @@ -4200,7 +4122,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, id, expr.span, struct_did, - fields[], + fields.index(&FullRange), base_expr.as_ref().map(|e| &**e)); } _ => { @@ -4209,7 +4131,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, pprust::path_to_string(path)); check_struct_fields_on_error(fcx, id, - fields[], + fields.index(&FullRange), base_expr); } } @@ -4250,7 +4172,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, fcx.infcx() .ty_to_string( actual_structure_type), - type_error_description)[]); + type_error_description).index(&FullRange)); ty::note_and_explain_type_err(tcx, &type_error); } } @@ -4270,91 +4192,45 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, if ty::type_is_error(base_t) { fcx.write_ty(id, base_t); } else { - match idx.node { - ast::ExprRange(ref start, ref end) => { - // A slice, rather than an index. Special cased for now (KILLME). + check_expr(fcx, &**idx); + let idx_t = fcx.expr_ty(&**idx); + if ty::type_is_error(idx_t) { + fcx.write_ty(id, idx_t); + } else { let base_t = structurally_resolved_type(fcx, expr.span, base_t); let result = autoderef_for_index(fcx, &**base, base_t, lvalue_pref, |adj_ty, adj| { - try_overloaded_slice_step(fcx, - MethodCall::expr(expr.id), - expr, - &**base, - adj_ty, - adj, - lvalue_pref, - start, - end) + try_index_step(fcx, + MethodCall::expr(expr.id), + expr, + &**base, + adj_ty, + adj, + lvalue_pref, + idx_t) }); - let mut args = vec![]; - start.as_ref().map(|x| args.push(x)); - end.as_ref().map(|x| args.push(x)); - match result { Some((index_ty, element_ty)) => { - for a in args.iter() { - check_expr_has_type(fcx, &***a, index_ty); - } - fcx.write_ty(idx.id, element_ty); - fcx.write_ty(id, element_ty) + // FIXME: we've already checked idx above, we should + // probably just demand subtype or something here. 
+ check_expr_has_type(fcx, &**idx, index_ty); + fcx.write_ty(id, element_ty); } _ => { - for a in args.iter() { - check_expr(fcx, &***a); - } - fcx.type_error_message(expr.span, - |actual| { - format!("cannot take a slice of a value with type `{}`", + check_expr_has_type(fcx, &**idx, fcx.tcx().types.err); + fcx.type_error_message( + expr.span, + |actual| { + format!("cannot index a value of type `{}`", actual) - }, - base_t, - None); - fcx.write_ty(idx.id, fcx.tcx().types.err); + }, + base_t, + None); fcx.write_ty(id, fcx.tcx().types.err); } } - } - _ => { - check_expr(fcx, &**idx); - let idx_t = fcx.expr_ty(&**idx); - if ty::type_is_error(idx_t) { - fcx.write_ty(id, idx_t); - } else { - let base_t = structurally_resolved_type(fcx, expr.span, base_t); - - let result = - autoderef_for_index(fcx, &**base, base_t, lvalue_pref, |adj_ty, adj| { - try_index_step(fcx, - MethodCall::expr(expr.id), - expr, - &**base, - adj_ty, - adj, - lvalue_pref) - }); - - match result { - Some((index_ty, element_ty)) => { - check_expr_has_type(fcx, &**idx, index_ty); - fcx.write_ty(id, element_ty); - } - _ => { - check_expr_has_type(fcx, &**idx, fcx.tcx().types.err); - fcx.type_error_message( - expr.span, - |actual| { - format!("cannot index a value of type `{}`", - actual) - }, - base_t, - None); - fcx.write_ty(id, fcx.tcx().types.err); - } - } - } - } } } } @@ -4387,7 +4263,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, }; // Note that we don't check the type of start/end satisfy any - // bounds because right the range structs do not have any. If we add + // bounds because right now the range structs do not have any. If we add // some bounds, then we'll need to check `t_start` against them here. let range_type = match idx_type { @@ -4859,7 +4735,7 @@ pub fn check_enum_variants(ccx: &CrateCtxt, ast::TyU16 => disr as u16 as Disr == disr, ast::TyU32 => disr as u32 as Disr == disr, ast::TyU64 => disr as u64 as Disr == disr, - ast::TyU => uint_in_range(ccx, ccx.tcx.sess.target.uint_type, disr) + ast::TyUs => uint_in_range(ccx, ccx.tcx.sess.target.uint_type, disr) } } fn int_in_range(ccx: &CrateCtxt, ty: ast::IntTy, disr: ty::Disr) -> bool { @@ -4868,7 +4744,7 @@ pub fn check_enum_variants(ccx: &CrateCtxt, ast::TyI16 => disr as i16 as Disr == disr, ast::TyI32 => disr as i32 as Disr == disr, ast::TyI64 => disr as i64 as Disr == disr, - ast::TyI => int_in_range(ccx, ccx.tcx.sess.target.int_type, disr) + ast::TyIs => int_in_range(ccx, ccx.tcx.sess.target.int_type, disr) } } match ty { @@ -4971,7 +4847,7 @@ pub fn check_enum_variants(ccx: &CrateCtxt, } let hint = *ty::lookup_repr_hints(ccx.tcx, ast::DefId { krate: ast::LOCAL_CRATE, node: id }) - [].get(0).unwrap_or(&attr::ReprAny); + .index(&FullRange).get(0).unwrap_or(&attr::ReprAny); if hint != attr::ReprAny && vs.len() <= 1 { if vs.len() == 1 { @@ -5178,7 +5054,7 @@ pub fn instantiate_path<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, } assert_eq!(segment_spaces.len(), path.segments.len()); - debug!("segment_spaces={}", segment_spaces); + debug!("segment_spaces={:?}", segment_spaces); // Next, examine the definition, and determine how many type // parameters we expect from each space. 
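The ExprIndex rework above drops the special-cased Slice/SliceMut lang-item lookup (try_overloaded_slice_step) and lets try_index_step choose between built-in indexing, now reserved for indices of type uint (or unresolved integer literals), and the Index/IndexMut lang items for everything else. A rough sketch of the two paths from the user's side, written against today's std::ops::Index signature (which differs from the trait as it existed when this patch landed) and a made-up Registry type:

use std::collections::HashMap;
use std::ops::Index;

struct Registry {
    entries: HashMap<String, u32>,
}

impl Index<&str> for Registry {
    type Output = u32;

    // Overloaded indexing: `registry["name"]` resolves to this impl.
    fn index(&self, key: &str) -> &u32 {
        &self.entries[key]
    }
}

fn main() {
    let v = vec![10, 20, 30];
    // Integer index: handled by built-in indexing, no trait lookup involved.
    println!("{}", v[1]);

    let mut entries = HashMap::new();
    entries.insert("answer".to_string(), 42);
    let registry = Registry { entries };
    // Non-integer index: dispatched through the `Index` impl above.
    println!("{}", registry["answer"]);
}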
@@ -5402,7 +5278,7 @@ pub fn instantiate_path<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, .take_while(|d| d.default.is_none()) .count(); - debug!("adjust_type_parameters(space={}, \ + debug!("adjust_type_parameters(space={:?}, \ provided_len={}, \ desired_len={}, \ required_len={})", @@ -5642,7 +5518,7 @@ pub fn check_intrinsic_type(ccx: &CrateCtxt, it: &ast::ForeignItem) { "get_tydesc" => { let tydesc_ty = match ty::get_tydesc_ty(ccx.tcx) { Ok(t) => t, - Err(s) => { tcx.sess.span_fatal(it.span, s[]); } + Err(s) => { tcx.sess.span_fatal(it.span, s.index(&FullRange)); } }; let td_ptr = ty::mk_ptr(ccx.tcx, ty::mt { ty: tydesc_ty, @@ -5658,7 +5534,7 @@ pub fn check_intrinsic_type(ccx: &CrateCtxt, it: &ast::ForeignItem) { ty::mk_struct(ccx.tcx, did, ccx.tcx.mk_substs(subst::Substs::empty()))), Err(msg) => { - tcx.sess.span_fatal(it.span, msg[]); + tcx.sess.span_fatal(it.span, msg.index(&FullRange)); } } }, diff --git a/src/librustc_typeck/check/regionck.rs b/src/librustc_typeck/check/regionck.rs index c7df5ed8453fd..11ad7bcb09159 100644 --- a/src/librustc_typeck/check/regionck.rs +++ b/src/librustc_typeck/check/regionck.rs @@ -188,8 +188,8 @@ fn region_of_def(fcx: &FnCtxt, def: def::Def) -> ty::Region { } } _ => { - tcx.sess.bug(format!("unexpected def in region_of_def: {}", - def)[]) + tcx.sess.bug(format!("unexpected def in region_of_def: {:?}", + def).index(&FullRange)) } } } @@ -282,13 +282,13 @@ impl<'a, 'tcx> Rcx<'a, 'tcx> { Some(f) => f, None => { self.tcx().sess.bug( - format!("No fn-sig entry for id={}", id)[]); + format!("No fn-sig entry for id={}", id).index(&FullRange)); } }; let len = self.region_bound_pairs.len(); - self.relate_free_regions(fn_sig[], body.id); - link_fn_args(self, CodeExtent::from_node_id(body.id), fn_decl.inputs[]); + self.relate_free_regions(fn_sig.index(&FullRange), body.id); + link_fn_args(self, CodeExtent::from_node_id(body.id), fn_decl.inputs.index(&FullRange)); self.visit_block(body); self.visit_region_obligations(body.id); self.region_bound_pairs.truncate(len); @@ -484,7 +484,7 @@ fn visit_expr(rcx: &mut Rcx, expr: &ast::Expr) { // Check any autoderefs or autorefs that appear. for &adjustment in rcx.fcx.inh.adjustments.borrow().get(&expr.id).iter() { - debug!("adjustment={}", adjustment); + debug!("adjustment={:?}", adjustment); match *adjustment { ty::AdjustDerefRef(ty::AutoDerefRef {autoderefs, autoref: ref opt_autoref}) => { let expr_ty = rcx.resolve_node_type(expr.id); @@ -582,7 +582,9 @@ fn visit_expr(rcx: &mut Rcx, expr: &ast::Expr) { Some(method) => { constrain_call(rcx, expr, Some(&**base), None::.iter(), true); - ty::ty_fn_ret(method.ty).unwrap() + let fn_ret = // late-bound regions in overloaded method calls are instantiated + ty::assert_no_late_bound_regions(rcx.tcx(), &ty::ty_fn_ret(method.ty)); + fn_ret.unwrap() } None => rcx.resolve_node_type(base.id) }; @@ -627,7 +629,7 @@ fn visit_expr(rcx: &mut Rcx, expr: &ast::Expr) { } ast::ExprMatch(ref discr, ref arms, _) => { - link_match(rcx, &**discr, arms[]); + link_match(rcx, &**discr, arms.index(&FullRange)); visit::walk_expr(rcx, expr); } @@ -828,7 +830,7 @@ fn check_expr_fn_block(rcx: &mut Rcx, debug!("constrain_free_variables({}, {})", region_bound.repr(tcx), expr.repr(tcx)); for freevar in freevars.iter() { - debug!("freevar def is {}", freevar.def); + debug!("freevar def is {:?}", freevar.def); // Identify the variable being closed over and its node-id. 
let def = freevar.def; @@ -952,7 +954,7 @@ fn constrain_autoderefs<'a, 'tcx>(rcx: &mut Rcx<'a, 'tcx>, ty::ty_rptr(r, ref m) => (m.mutbl, r), _ => rcx.tcx().sess.span_bug(deref_expr.span, format!("bad overloaded deref type {}", - method.ty.repr(rcx.tcx()))[]) + method.ty.repr(rcx.tcx())).index(&FullRange)) }; { let mc = mc::MemCategorizationContext::new(rcx.fcx); @@ -1038,7 +1040,7 @@ fn type_of_node_must_outlive<'a, 'tcx>( rcx.fcx.inh.adjustments.borrow().get(&id), |method_call| rcx.resolve_method_type(method_call)); debug!("constrain_regions_in_type_of_node(\ - ty={}, ty0={}, id={}, minimum_lifetime={})", + ty={}, ty0={}, id={}, minimum_lifetime={:?})", ty_to_string(tcx, ty), ty_to_string(tcx, ty0), id, minimum_lifetime); type_must_outlive(rcx, origin, ty, minimum_lifetime); @@ -1090,7 +1092,7 @@ fn link_match(rcx: &Rcx, discr: &ast::Expr, arms: &[ast::Arm]) { /// then ensures that the lifetime of the resulting pointer is /// linked to the lifetime of its guarantor (if any). fn link_fn_args(rcx: &Rcx, body_scope: CodeExtent, args: &[ast::Arg]) { - debug!("regionck::link_fn_args(body_scope={})", body_scope); + debug!("regionck::link_fn_args(body_scope={:?})", body_scope); let mc = mc::MemCategorizationContext::new(rcx.fcx); for arg in args.iter() { let arg_ty = rcx.fcx.node_ty(arg.id); @@ -1144,7 +1146,7 @@ fn link_autoref(rcx: &Rcx, autoderefs: uint, autoref: &ty::AutoRef) { - debug!("link_autoref(autoref={})", autoref); + debug!("link_autoref(autoref={:?})", autoref); let mc = mc::MemCategorizationContext::new(rcx.fcx); let expr_cmt = ignore_err!(mc.cat_expr_autoderefd(expr, autoderefs)); debug!("expr_cmt={}", expr_cmt.repr(rcx.tcx())); @@ -1165,7 +1167,7 @@ fn link_by_ref(rcx: &Rcx, expr: &ast::Expr, callee_scope: CodeExtent) { let tcx = rcx.tcx(); - debug!("link_by_ref(expr={}, callee_scope={})", + debug!("link_by_ref(expr={}, callee_scope={:?})", expr.repr(tcx), callee_scope); let mc = mc::MemCategorizationContext::new(rcx.fcx); let expr_cmt = ignore_err!(mc.cat_expr(expr)); @@ -1318,7 +1320,7 @@ fn link_reborrowed_region<'a, 'tcx>(rcx: &Rcx<'a, 'tcx>, span, format!("Illegal upvar id: {}", upvar_id.repr( - rcx.tcx()))[]); + rcx.tcx())).index(&FullRange)); } } } diff --git a/src/librustc_typeck/check/regionmanip.rs b/src/librustc_typeck/check/regionmanip.rs index 7ca21bdf5b83f..84d94b0392e9d 100644 --- a/src/librustc_typeck/check/regionmanip.rs +++ b/src/librustc_typeck/check/regionmanip.rs @@ -147,7 +147,7 @@ impl<'a, 'tcx> Wf<'a, 'tcx> { ty::ty_open(_) => { self.tcx.sess.bug( format!("Unexpected type encountered while doing wf check: {}", - ty.repr(self.tcx))[]); + ty.repr(self.tcx)).index(&FullRange)); } } } diff --git a/src/librustc_typeck/check/upvar.rs b/src/librustc_typeck/check/upvar.rs index 92fda96595c7b..ec44d765a8e68 100644 --- a/src/librustc_typeck/check/upvar.rs +++ b/src/librustc_typeck/check/upvar.rs @@ -134,7 +134,7 @@ impl<'a,'tcx> SeedBorrowKind<'a,'tcx> { let var_node_id = freevar.def.local_node_id(); let upvar_id = ty::UpvarId { var_id: var_node_id, closure_expr_id: expr.id }; - debug!("seed upvar_id {}", upvar_id); + debug!("seed upvar_id {:?}", upvar_id); let origin = UpvarRegion(upvar_id, expr.span); let freevar_region = self.infcx().next_region_var(origin); let upvar_borrow = ty::UpvarBorrow { kind: ty::ImmBorrow, @@ -269,7 +269,7 @@ impl<'a,'tcx> AdjustBorrowKind<'a,'tcx>{ upvar_id: ty::UpvarId, upvar_borrow: &mut ty::UpvarBorrow, kind: ty::BorrowKind) { - debug!("adjust_upvar_borrow_kind: id={} kind=({} -> {})", + debug!("adjust_upvar_borrow_kind: id={:?} 
kind=({:?} -> {:?})", upvar_id, upvar_borrow.kind, kind); match (upvar_borrow.kind, kind) { @@ -337,7 +337,7 @@ impl<'a,'tcx> euv::Delegate<'tcx> for AdjustBorrowKind<'a,'tcx> { bk: ty::BorrowKind, _loan_cause: euv::LoanCause) { - debug!("borrow(borrow_id={}, cmt={}, bk={})", + debug!("borrow(borrow_id={}, cmt={}, bk={:?})", borrow_id, cmt.repr(self.tcx()), bk); match bk { diff --git a/src/librustc_typeck/check/vtable.rs b/src/librustc_typeck/check/vtable.rs index 87ede24226bdb..e302609bf228b 100644 --- a/src/librustc_typeck/check/vtable.rs +++ b/src/librustc_typeck/check/vtable.rs @@ -74,7 +74,7 @@ pub fn check_object_cast<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, source_expr.span, format!("can only cast an boxed pointer \ to a boxed object, not a {}", - ty::ty_sort_string(fcx.tcx(), source_ty))[]); + ty::ty_sort_string(fcx.tcx(), source_ty)).index(&FullRange)); } (_, &ty::ty_rptr(..)) => { @@ -82,7 +82,7 @@ pub fn check_object_cast<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, source_expr.span, format!("can only cast a &-pointer \ to an &-object, not a {}", - ty::ty_sort_string(fcx.tcx(), source_ty))[]); + ty::ty_sort_string(fcx.tcx(), source_ty)).index(&FullRange)); } _ => { diff --git a/src/librustc_typeck/check/wf.rs b/src/librustc_typeck/check/wf.rs index d4a5bda5f97f9..84823b0fd57da 100644 --- a/src/librustc_typeck/check/wf.rs +++ b/src/librustc_typeck/check/wf.rs @@ -86,13 +86,14 @@ impl<'ccx, 'tcx> CheckTypeWellFormedVisitor<'ccx, 'tcx> { { let ccx = self.ccx; let item_def_id = local_def(item.id); - let polytype = ty::lookup_item_type(ccx.tcx, item_def_id); + let type_scheme = ty::lookup_item_type(ccx.tcx, item_def_id); + reject_non_type_param_bounds(ccx.tcx, item.span, &type_scheme.generics); let param_env = ty::construct_parameter_environment(ccx.tcx, - &polytype.generics, + &type_scheme.generics, item.id); let inh = Inherited::new(ccx.tcx, param_env); - let fcx = blank_fn_ctxt(ccx, &inh, ty::FnConverging(polytype.ty), item.id); + let fcx = blank_fn_ctxt(ccx, &inh, ty::FnConverging(type_scheme.ty), item.id); f(self, &fcx); vtable::select_all_fcx_obligations_or_error(&fcx); regionck::regionck_item(&fcx, item); @@ -143,10 +144,12 @@ impl<'ccx, 'tcx> CheckTypeWellFormedVisitor<'ccx, 'tcx> { item.span, region::CodeExtent::from_node_id(item.id), Some(&mut this.cache)); + let type_scheme = ty::lookup_item_type(fcx.tcx(), local_def(item.id)); let item_ty = fcx.instantiate_type_scheme(item.span, &fcx.inh.param_env.free_substs, &type_scheme.ty); + bounds_checker.check_traits_in_ty(item_ty); }); } @@ -178,6 +181,7 @@ impl<'ccx, 'tcx> CheckTypeWellFormedVisitor<'ccx, 'tcx> { None => { return; } Some(t) => { t } }; + let trait_ref = fcx.instantiate_type_scheme(item.span, &fcx.inh.param_env.free_substs, &trait_ref); @@ -229,6 +233,35 @@ impl<'ccx, 'tcx> CheckTypeWellFormedVisitor<'ccx, 'tcx> { } } +// Reject any predicates that do not involve a type parameter. 
+fn reject_non_type_param_bounds<'tcx>(tcx: &ty::ctxt<'tcx>, + span: Span, + generics: &ty::Generics<'tcx>) { + for predicate in generics.predicates.iter() { + match predicate { + &ty::Predicate::Trait(ty::Binder(ref tr)) => { + let self_ty = tr.self_ty(); + if !self_ty.walk().any(|t| is_ty_param(t)) { + tcx.sess.span_err( + span, + format!("cannot bound type `{}`, where clause \ + bounds may only be attached to types involving \ + type parameters", + self_ty.repr(tcx)).as_slice()) + } + } + _ => {} + } + } + + fn is_ty_param(ty: ty::Ty) -> bool { + match &ty.sty { + &ty::sty::ty_param(_) => true, + _ => false + } + } +} + impl<'ccx, 'tcx, 'v> Visitor<'v> for CheckTypeWellFormedVisitor<'ccx, 'tcx> { fn visit_item(&mut self, i: &ast::Item) { self.check_item_well_formed(i); @@ -416,7 +449,13 @@ fn enum_variants<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, match variant.node.kind { ast::TupleVariantKind(ref args) if args.len() > 0 => { let ctor_ty = ty::node_id_to_type(fcx.tcx(), variant.node.id); - let arg_tys = ty::ty_fn_args(ctor_ty); + + // the regions in the argument types come from the + // enum def'n, and hence will all be early bound + let arg_tys = + ty::assert_no_late_bound_regions( + fcx.tcx(), &ty::ty_fn_args(ctor_ty)); + AdtVariant { fields: args.iter().enumerate().map(|(index, arg)| { let arg_ty = arg_tys[index]; diff --git a/src/librustc_typeck/check/writeback.rs b/src/librustc_typeck/check/writeback.rs index 70644b6e7de5d..82da22eab9837 100644 --- a/src/librustc_typeck/check/writeback.rs +++ b/src/librustc_typeck/check/writeback.rs @@ -287,7 +287,7 @@ impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> { }) } }; - debug!("Adjustments for node {}: {}", id, resolved_adjustment); + debug!("Adjustments for node {}: {:?}", id, resolved_adjustment); self.tcx().adjustments.borrow_mut().insert( id, resolved_adjustment); } @@ -300,7 +300,7 @@ impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> { // Resolve any method map entry match self.fcx.inh.method_map.borrow_mut().remove(&method_call) { Some(method) => { - debug!("writeback::resolve_method_map_entry(call={}, entry={})", + debug!("writeback::resolve_method_map_entry(call={:?}, entry={})", method_call, method.repr(self.tcx())); let new_method = MethodCallee { diff --git a/src/librustc_typeck/coherence/mod.rs b/src/librustc_typeck/coherence/mod.rs index 09ab98745bd6a..a0f30788bbddc 100644 --- a/src/librustc_typeck/coherence/mod.rs +++ b/src/librustc_typeck/coherence/mod.rs @@ -81,7 +81,7 @@ fn get_base_type_def_id<'a, 'tcx>(inference_context: &InferCtxt<'a, 'tcx>, inference_context.tcx.sess.span_bug( span, format!("coherence encountered unexpected type searching for base type: {}", - ty.repr(inference_context.tcx))[]); + ty.repr(inference_context.tcx)).index(&FullRange)); } } } @@ -204,7 +204,7 @@ impl<'a, 'tcx> CoherenceChecker<'a, 'tcx> { trait_ref: &ty::TraitRef<'tcx>, all_impl_items: &mut Vec) { let tcx = self.crate_context.tcx; - debug!("instantiate_default_methods(impl_id={}, trait_ref={})", + debug!("instantiate_default_methods(impl_id={:?}, trait_ref={})", impl_id, trait_ref.repr(tcx)); let impl_type_scheme = ty::lookup_item_type(tcx, impl_id); @@ -215,7 +215,7 @@ impl<'a, 'tcx> CoherenceChecker<'a, 'tcx> { let new_id = tcx.sess.next_node_id(); let new_did = local_def(new_id); - debug!("new_did={} trait_method={}", new_did, trait_method.repr(tcx)); + debug!("new_did={:?} trait_method={}", new_did, trait_method.repr(tcx)); // Create substitutions for the various trait parameters. 
let new_method_ty = @@ -268,7 +268,7 @@ impl<'a, 'tcx> CoherenceChecker<'a, 'tcx> { } fn add_trait_impl(&self, base_def_id: DefId, impl_def_id: DefId) { - debug!("add_trait_impl: base_def_id={} impl_def_id={}", + debug!("add_trait_impl: base_def_id={:?} impl_def_id={:?}", base_def_id, impl_def_id); ty::record_trait_implementation(self.crate_context.tcx, base_def_id, @@ -490,7 +490,7 @@ impl<'a, 'tcx> CoherenceChecker<'a, 'tcx> { format!("the trait `Copy` may not be \ implemented for this type; field \ `{}` does not implement `Copy`", - token::get_name(name))[]) + token::get_name(name)).index(&FullRange)) } Err(ty::VariantDoesNotImplementCopy(name)) => { tcx.sess @@ -498,7 +498,7 @@ impl<'a, 'tcx> CoherenceChecker<'a, 'tcx> { format!("the trait `Copy` may not be \ implemented for this type; variant \ `{}` does not implement `Copy`", - token::get_name(name))[]) + token::get_name(name)).index(&FullRange)) } Err(ty::TypeIsStructural) => { tcx.sess diff --git a/src/librustc_typeck/collect.rs b/src/librustc_typeck/collect.rs index bbafcdae1bba1..79e98f15a2d43 100644 --- a/src/librustc_typeck/collect.rs +++ b/src/librustc_typeck/collect.rs @@ -35,7 +35,7 @@ use middle::lang_items::SizedTraitLangItem; use middle::region; use middle::resolve_lifetime; use middle::subst; -use middle::subst::{Substs}; +use middle::subst::{Substs, TypeSpace}; use middle::ty::{AsPredicate, ImplContainer, ImplOrTraitItemContainer, TraitContainer}; use middle::ty::{self, RegionEscape, Ty, TypeScheme}; use middle::ty_fold::{self, TypeFolder, TypeFoldable}; @@ -47,6 +47,7 @@ use util::ppaux; use util::ppaux::{Repr,UserString}; use write_ty_to_tcx; +use std::collections::HashSet; use std::rc::Rc; use syntax::abi; @@ -169,7 +170,7 @@ impl<'a, 'tcx> AstConv<'tcx> for CollectCtxt<'a, 'tcx> { } x => { self.tcx.sess.bug(format!("unexpected sort of node \ - in get_item_type_scheme(): {}", + in get_item_type_scheme(): {:?}", x).as_slice()); } } @@ -211,7 +212,7 @@ fn get_enum_variant_types<'a, 'tcx>(ccx: &CollectCtxt<'a, 'tcx>, ast::TupleVariantKind(ref args) if args.len() > 0 => { let rs = ExplicitRscope; let input_tys: Vec<_> = args.iter().map(|va| ccx.to_ty(&rs, &*va.ty)).collect(); - ty::mk_ctor_fn(tcx, variant_def_id, input_tys[], enum_ty) + ty::mk_ctor_fn(tcx, variant_def_id, input_tys.index(&FullRange), enum_ty) } ast::TupleVariantKind(_) => { @@ -258,7 +259,7 @@ fn collect_trait_methods<'a, 'tcx>(ccx: &CollectCtxt<'a, 'tcx>, ccx, trait_id, &trait_def.generics, - trait_items[], + trait_items.index(&FullRange), &m.id, &m.ident.name, &m.explicit_self, @@ -272,7 +273,7 @@ fn collect_trait_methods<'a, 'tcx>(ccx: &CollectCtxt<'a, 'tcx>, ccx, trait_id, &trait_def.generics, - trait_items[], + trait_items.index(&FullRange), &m.id, &m.pe_ident().name, m.pe_explicit_self(), @@ -475,7 +476,7 @@ fn convert_methods<'a,'tcx,'i,I>(ccx: &CollectCtxt<'a, 'tcx>, write_ty_to_tcx(tcx, m.id, fty); - debug!("writing method type: def_id={} mty={}", + debug!("writing method type: def_id={:?} mty={}", mty.def_id, mty.repr(ccx.tcx)); tcx.impl_or_trait_items @@ -644,6 +645,10 @@ fn convert(ccx: &CollectCtxt, it: &ast::Item) { Some(selfty), None); } + + enforce_impl_ty_params_are_constrained(ccx.tcx, + generics, + local_def(it.id)); }, ast::ItemTrait(_, _, _, ref trait_methods) => { let trait_def = trait_def_of_item(ccx, it); @@ -774,7 +779,7 @@ fn convert_struct<'a, 'tcx>(ccx: &CollectCtxt<'a, 'tcx>, local_def(field.node.id)].ty).collect(); let ctor_fn_ty = ty::mk_ctor_fn(tcx, local_def(ctor_id), - inputs[], + inputs.index(&FullRange), selfty); 
write_ty_to_tcx(tcx, ctor_id, ctor_fn_ty); tcx.tcache.borrow_mut().insert(local_def(ctor_id), @@ -815,7 +820,7 @@ fn get_trait_def<'a, 'tcx>(ccx: &CollectCtxt<'a, 'tcx>, ast_map::NodeItem(item) => trait_def_of_item(ccx, &*item), _ => { ccx.tcx.sess.bug(format!("get_trait_def({}): not an item", - trait_id.node)[]) + trait_id.node).index(&FullRange)) } } } @@ -840,7 +845,7 @@ fn trait_def_of_item<'a, 'tcx>(ccx: &CollectCtxt<'a, 'tcx>, ref s => { tcx.sess.span_bug( it.span, - format!("trait_def_of_item invoked on {}", s)[]); + format!("trait_def_of_item invoked on {:?}", s).index(&FullRange)); } }; @@ -1025,8 +1030,8 @@ fn ty_generics_for_type_or_impl<'a, 'tcx>(ccx: &CollectCtxt<'a, 'tcx>, -> ty::Generics<'tcx> { ty_generics(ccx, subst::TypeSpace, - generics.lifetimes[], - generics.ty_params[], + generics.lifetimes.index(&FullRange), + generics.ty_params.index(&FullRange), ty::Generics::empty(), &generics.where_clause) } @@ -1044,8 +1049,8 @@ fn ty_generics_for_trait<'a, 'tcx>(ccx: &CollectCtxt<'a, 'tcx>, let mut generics = ty_generics(ccx, subst::TypeSpace, - ast_generics.lifetimes[], - ast_generics.ty_params[], + ast_generics.lifetimes.index(&FullRange), + ast_generics.ty_params.index(&FullRange), ty::Generics::empty(), &ast_generics.where_clause); @@ -1130,8 +1135,8 @@ fn ty_generics_for_fn_or_method<'a,'tcx>(ccx: &CollectCtxt<'a,'tcx>, let early_lifetimes = resolve_lifetime::early_bound_lifetimes(generics); ty_generics(ccx, subst::FnSpace, - early_lifetimes[], - generics.ty_params[], + early_lifetimes.index(&FullRange), + generics.ty_params.index(&FullRange), base_generics, &generics.where_clause) } @@ -1201,7 +1206,7 @@ fn ty_generics<'a,'tcx>(ccx: &CollectCtxt<'a,'tcx>, index: i as u32, def_id: local_def(l.lifetime.id), bounds: bounds }; - debug!("ty_generics: def for region param: {}", def); + debug!("ty_generics: def for region param: {:?}", def); result.regions.push(space, def); } @@ -1213,7 +1218,7 @@ fn ty_generics<'a,'tcx>(ccx: &CollectCtxt<'a,'tcx>, space, param, i as u32); - debug!("ty_generics: def for type param: {}, {}", + debug!("ty_generics: def for type param: {}, {:?}", def.repr(ccx.tcx), space); result.types.push(space, def); @@ -1318,7 +1323,7 @@ fn get_or_create_type_parameter_def<'a,'tcx>(ccx: &CollectCtxt<'a,'tcx>, let param_ty = ty::ParamTy::new(space, index, param.ident.name); let bounds = compute_bounds(ccx, param_ty.to_ty(ccx.tcx), - param.bounds[], + param.bounds.index(&FullRange), SizedByDefault::Yes, param.span); let default = match param.default { @@ -1399,7 +1404,7 @@ fn check_bounds_compatible<'tcx>(tcx: &ty::ctxt<'tcx>, if !param_bounds.builtin_bounds.contains(&ty::BoundSized) { ty::each_bound_trait_and_supertraits( tcx, - param_bounds.trait_bounds[], + param_bounds.trait_bounds.index(&FullRange), |trait_ref| { let trait_def = ty::lookup_trait_def(tcx, trait_ref.def_id()); if trait_def.bounds.builtin_bounds.contains(&ty::BoundSized) { @@ -1605,3 +1610,96 @@ fn check_method_self_type<'a, 'tcx, RS:RegionScope>( }) } } + +/// Checks that all the type parameters on an impl +fn enforce_impl_ty_params_are_constrained<'tcx>(tcx: &ty::ctxt<'tcx>, + ast_generics: &ast::Generics, + impl_def_id: ast::DefId) +{ + let impl_scheme = ty::lookup_item_type(tcx, impl_def_id); + let impl_trait_ref = ty::impl_trait_ref(tcx, impl_def_id); + + // The trait reference is an input, so find all type parameters + // reachable from there, to start (if this is an inherent impl, + // then just examine the self type). 
+ let mut input_parameters: HashSet<_> = + impl_trait_ref.iter() + .flat_map(|t| t.input_types().iter()) // Types in trait ref, if any + .chain(Some(impl_scheme.ty).iter()) // Self type, always + .flat_map(|t| t.walk()) + .filter_map(to_opt_param_ty) + .collect(); + + loop { + let num_inputs = input_parameters.len(); + + let mut projection_predicates = + impl_scheme.generics.predicates + .iter() + .filter_map(|predicate| { + match *predicate { + // Ignore higher-ranked binders. For the purposes + // of this check, they don't matter because they + // only affect named regions, and we're just + // concerned about type parameters here. + ty::Predicate::Projection(ref data) => Some(data.0.clone()), + _ => None, + } + }); + + for projection in projection_predicates { + // Special case: watch out for some kind of sneaky attempt + // to project out an associated type defined by this very trait. + if Some(projection.projection_ty.trait_ref.clone()) == impl_trait_ref { + continue; + } + + let relies_only_on_inputs = + projection.projection_ty.trait_ref.input_types().iter() + .flat_map(|t| t.walk()) + .filter_map(to_opt_param_ty) + .all(|t| input_parameters.contains(&t)); + + if relies_only_on_inputs { + input_parameters.extend( + projection.ty.walk().filter_map(to_opt_param_ty)); + } + } + + if input_parameters.len() == num_inputs { + break; + } + } + + for (index, ty_param) in ast_generics.ty_params.iter().enumerate() { + let param_ty = ty::ParamTy { space: TypeSpace, + idx: index as u32, + name: ty_param.ident.name }; + if !input_parameters.contains(&param_ty) { + if ty::has_attr(tcx, impl_def_id, "old_impl_check") { + tcx.sess.span_warn( + ty_param.span, + format!("the type parameter `{}` is not constrained by the \ + impl trait, self type, or predicates", + param_ty.user_string(tcx)).as_slice()); + } else { + tcx.sess.span_err( + ty_param.span, + format!("the type parameter `{}` is not constrained by the \ + impl trait, self type, or predicates", + param_ty.user_string(tcx)).as_slice()); + tcx.sess.span_help( + ty_param.span, + format!("you can temporarily opt out of this rule by placing \ + the `#[old_impl_check]` attribute on the impl").as_slice()); + } + } + } + + fn to_opt_param_ty<'tcx>(ty: Ty<'tcx>) -> Option<ty::ParamTy> { + match ty.sty { + ty::ty_param(ref d) => Some(d.clone()), + _ => None, + } + } +} diff --git a/src/librustc_typeck/lib.rs b/src/librustc_typeck/lib.rs index dc434f1401585..ae8731dfa476b 100644 --- a/src/librustc_typeck/lib.rs +++ b/src/librustc_typeck/lib.rs @@ -71,27 +71,13 @@ This API is completely unstable and subject to change.
html_favicon_url = "http://www.rust-lang.org/favicon.ico", html_root_url = "http://doc.rust-lang.org/nightly/")] -#![feature(default_type_params, globs, macro_rules, phase, quote)] +#![feature(quote)] #![feature(slicing_syntax, unsafe_destructor)] #![feature(rustc_diagnostic_macros)] -#![feature(unboxed_closures)] #![allow(non_camel_case_types)] -#[cfg(stage0)] -#[phase(plugin, link)] -extern crate log; - -#[cfg(not(stage0))] -#[macro_use] -extern crate log; - -#[cfg(stage0)] -#[phase(plugin, link)] -extern crate syntax; - -#[cfg(not(stage0))] -#[macro_use] -extern crate syntax; +#[macro_use] extern crate log; +#[macro_use] extern crate syntax; extern crate arena; extern crate rustc; @@ -207,7 +193,7 @@ fn require_same_types<'a, 'tcx, M>(tcx: &ty::ctxt<'tcx>, format!("{}: {}", msg(), ty::type_err_to_str(tcx, - terr))[]); + terr)).index(&FullRange)); ty::note_and_explain_type_err(tcx, terr); false } @@ -256,7 +242,7 @@ fn check_main_fn_ty(ccx: &CrateCtxt, format!("main has a non-function type: found \ `{}`", ppaux::ty_to_string(tcx, - main_t))[]); + main_t)).index(&FullRange)); } } } @@ -307,7 +293,7 @@ fn check_start_fn_ty(ccx: &CrateCtxt, tcx.sess.span_bug(start_span, format!("start has a non-function type: found \ `{}`", - ppaux::ty_to_string(tcx, start_t))[]); + ppaux::ty_to_string(tcx, start_t)).index(&FullRange)); } } } diff --git a/src/librustc_typeck/variance.rs b/src/librustc_typeck/variance.rs index e58c2275fcd96..393ba19ba182a 100644 --- a/src/librustc_typeck/variance.rs +++ b/src/librustc_typeck/variance.rs @@ -243,8 +243,8 @@ enum VarianceTerm<'a> { impl<'a> fmt::Show for VarianceTerm<'a> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { - ConstantTerm(c1) => write!(f, "{}", c1), - TransformTerm(v1, v2) => write!(f, "({} \u{00D7} {})", v1, v2), + ConstantTerm(c1) => write!(f, "{:?}", c1), + TransformTerm(v1, v2) => write!(f, "({:?} \u{00D7} {:?})", v1, v2), InferredTerm(id) => write!(f, "[{}]", { let InferredIndex(i) = id; i }) } } @@ -323,10 +323,10 @@ impl<'a, 'tcx> TermsContext<'a, 'tcx> { assert!(newly_added); debug!("add_inferred(item_id={}, \ - kind={}, \ + kind={:?}, \ index={}, \ param_id={}, - inf_index={})", + inf_index={:?})", item_id, kind, index, param_id, inf_index); } @@ -402,8 +402,8 @@ impl<'a, 'tcx, 'v> Visitor<'v> for TermsContext<'a, 'tcx> { struct ConstraintContext<'a, 'tcx: 'a> { terms_cx: TermsContext<'a, 'tcx>, - // These are the def-id of the std::kinds::marker::InvariantType, - // std::kinds::marker::InvariantLifetime, and so on. The arrays + // These are the def-id of the std::marker::InvariantType, + // std::marker::InvariantLifetime, and so on. The arrays // are indexed by the `ParamKind` (type, lifetime, self). Note // that there are no marker types for self, so the entries for // self are always None. 
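For illustration only, not part of the patch: the `enforce_impl_ty_params_are_constrained` check added to collect.rs above requires every type parameter on an impl to be reachable from the trait reference, the self type, or a projection predicate whose own inputs are already constrained. Below is a minimal sketch of the rule in today's Rust, with invented trait and type names (`Convert`, `Wrapper`); current compilers report the rejected case with the same "type parameter is not constrained" message (E0207), and the `#[old_impl_check]` attribute used in the patch was only a transitional opt-out.

    // Names below (`Convert`, `Wrapper`) are invented for illustration.
    trait Convert { type Output; }

    struct Wrapper<T>(T);

    // Accepted: `U` is constrained through the projection predicate
    // `<T as Iterator>::Item == U`, the case picked up by the fixed-point
    // loop over projection predicates in the patch.
    impl<T, U> Convert for Wrapper<T>
        where T: Iterator<Item = U>
    {
        type Output = U;
    }

    // Rejected by the check (left commented out so the sketch compiles):
    // `U` is reachable from neither the trait reference, the self type,
    // nor any usable predicate.
    //
    // impl<T, U> Convert for Wrapper<T> {
    //     type Output = U;
    // }

    fn main() {
        let _item: <Wrapper<std::iter::Repeat<u8>> as Convert>::Output = 1u8;
    }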
@@ -564,7 +564,7 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> { None => { self.tcx().sess.bug(format!( "no inferred index entry for {}", - self.tcx().map.node_to_string(param_id))[]); + self.tcx().map.node_to_string(param_id)).index(&FullRange)); } } } @@ -673,8 +673,8 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> { fn add_constraint(&mut self, InferredIndex(index): InferredIndex, variance: VarianceTermPtr<'a>) { - debug!("add_constraint(index={}, variance={})", - index, variance.to_string()); + debug!("add_constraint(index={}, variance={:?})", + index, variance); self.constraints.push(Constraint { inferred: InferredIndex(index), variance: variance }); } @@ -839,7 +839,7 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> { self.tcx().sess.bug( format!("unexpected type encountered in \ variance inference: {}", - ty.repr(self.tcx()))[]); + ty.repr(self.tcx())).index(&FullRange)); } } } @@ -854,7 +854,7 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> { region_param_defs: &[ty::RegionParameterDef], substs: &subst::Substs<'tcx>, variance: VarianceTermPtr<'a>) { - debug!("add_constraints_from_substs(def_id={})", def_id); + debug!("add_constraints_from_substs(def_id={:?})", def_id); for p in type_param_defs.iter() { let variance_decl = @@ -919,7 +919,7 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> { .sess .bug(format!("unexpected region encountered in variance \ inference: {}", - region.repr(self.tcx()))[]); + region.repr(self.tcx())).index(&FullRange)); } } } @@ -988,14 +988,14 @@ impl<'a, 'tcx> SolveContext<'a, 'tcx> { let new_value = glb(variance, old_value); if old_value != new_value { debug!("Updating inferred {} (node {}) \ - from {} to {} due to {}", + from {:?} to {:?} due to {:?}", inferred, self.terms_cx .inferred_infos[inferred] .param_id, old_value, new_value, - term.to_string()); + term); self.solutions[inferred] = new_value; changed = true; @@ -1028,7 +1028,7 @@ impl<'a, 'tcx> SolveContext<'a, 'tcx> { inferred_infos[index].item_id == item_id { let info = &inferred_infos[index]; let variance = solutions[index]; - debug!("Index {} Info {} / {} / {} Variance {}", + debug!("Index {} Info {} / {:?} / {:?} Variance {:?}", index, info.index, info.kind, info.space, variance); match info.kind { TypeParam => { @@ -1055,7 +1055,7 @@ impl<'a, 'tcx> SolveContext<'a, 'tcx> { // attribute and report an error with various results if found. 
if ty::has_attr(tcx, item_def_id, "rustc_variance") { let found = item_variances.repr(tcx); - tcx.sess.span_err(tcx.map.span(item_id), found[]); + tcx.sess.span_err(tcx.map.span(item_id), found.index(&FullRange)); } let newly_added = tcx.item_variance_map.borrow_mut() diff --git a/src/librustdoc/clean/inline.rs b/src/librustdoc/clean/inline.rs index 3eda39f54a997..3e2474468adb6 100644 --- a/src/librustdoc/clean/inline.rs +++ b/src/librustdoc/clean/inline.rs @@ -397,9 +397,9 @@ fn build_const(cx: &DocContext, tcx: &ty::ctxt, use syntax::print::pprust; let expr = const_eval::lookup_const_by_id(tcx, did).unwrap_or_else(|| { - panic!("expected lookup_const_by_id to succeed for {}", did); + panic!("expected lookup_const_by_id to succeed for {:?}", did); }); - debug!("converting constant expr {} to snippet", expr); + debug!("converting constant expr {:?} to snippet", expr); let sn = pprust::expr_to_string(expr); debug!("got snippet {}", sn); diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs index bb9a9ac430340..bf2664bba6ad3 100644 --- a/src/librustdoc/clean/mod.rs +++ b/src/librustdoc/clean/mod.rs @@ -397,7 +397,7 @@ impl Clean for doctree::Module { } } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Show)] pub enum Attribute { Word(String), List(String, Vec ), @@ -450,7 +450,7 @@ impl<'a> attr::AttrMetaMethods for &'a Attribute { fn meta_item_list(&self) -> Option<&[P]> { None } } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Show)] pub struct TyParam { pub name: String, pub did: ast::DefId, @@ -483,7 +483,7 @@ impl<'tcx> Clean for ty::TypeParameterDef<'tcx> { } } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Show)] pub enum TyParamBound { RegionBound(Lifetime), TraitBound(PolyTrait, ast::TraitBoundModifier) @@ -621,7 +621,7 @@ impl<'tcx> Clean for ty::TraitRef<'tcx> { cx.external_paths.borrow_mut().as_mut().unwrap().insert(self.def_id, (fqn, TypeTrait)); - debug!("ty::TraitRef\n substs.types(TypeSpace): {}\n", + debug!("ty::TraitRef\n substs.types(TypeSpace): {:?}\n", self.substs.types.get_slice(ParamSpace::TypeSpace)); // collect any late bound regions @@ -632,7 +632,7 @@ impl<'tcx> Clean for ty::TraitRef<'tcx> { for &ty_s in ts.iter() { if let sty::ty_rptr(ref reg, _) = ty_s.sty { if let &Region::ReLateBound(_, _) = *reg { - debug!(" hit an ReLateBound {}", reg); + debug!(" hit an ReLateBound {:?}", reg); if let Some(lt) = reg.clean(cx) { late_bounds.push(lt) } @@ -674,7 +674,7 @@ impl<'tcx> Clean>> for subst::Substs<'tcx> { } } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Show)] pub struct Lifetime(String); impl Lifetime { @@ -724,7 +724,7 @@ impl Clean> for ty::Region { } } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Show)] pub enum WherePredicate { BoundPredicate { ty: Type, bounds: Vec }, RegionPredicate { lifetime: Lifetime, bounds: Vec}, @@ -757,7 +757,7 @@ impl Clean for ast::WherePredicate { } // maybe use a Generic enum and use ~[Generic]? 
-#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Show)] pub struct Generics { pub lifetimes: Vec, pub type_params: Vec, @@ -798,7 +798,7 @@ impl Clean for ast::Method { let all_inputs = &self.pe_fn_decl().inputs; let inputs = match self.pe_explicit_self().node { ast::SelfStatic => all_inputs.as_slice(), - _ => all_inputs[1..] + _ => all_inputs.index(&(1..)) }; let decl = FnDecl { inputs: Arguments { @@ -836,7 +836,7 @@ impl Clean for ast::TypeMethod { fn clean(&self, cx: &DocContext) -> Item { let inputs = match self.explicit_self.node { ast::SelfStatic => self.decl.inputs.as_slice(), - _ => self.decl.inputs[1..] + _ => self.decl.inputs.index(&(1..)) }; let decl = FnDecl { inputs: Arguments { @@ -908,7 +908,7 @@ impl Clean for doctree::Function { } } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Show)] pub struct ClosureDecl { pub lifetimes: Vec, pub decl: FnDecl, @@ -929,14 +929,14 @@ impl Clean for ast::ClosureTy { } } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Show)] pub struct FnDecl { pub inputs: Arguments, pub output: FunctionRetTy, pub attrs: Vec, } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Show)] pub struct Arguments { pub values: Vec, } @@ -989,7 +989,7 @@ impl<'a, 'tcx> Clean for (ast::DefId, &'a ty::PolyFnSig<'tcx>) { } } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Show)] pub struct Argument { pub type_: Type, pub name: String, @@ -1006,7 +1006,7 @@ impl Clean for ast::Arg { } } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Show)] pub enum FunctionRetTy { Return(Type), NoReturn @@ -1132,7 +1132,7 @@ impl<'tcx> Clean for ty::Method<'tcx> { self.fty.sig.clone()), s => { let sig = ty::Binder(ty::FnSig { - inputs: self.fty.sig.0.inputs[1..].to_vec(), + inputs: self.fty.sig.0.inputs.index(&(1..)).to_vec(), ..self.fty.sig.0.clone() }); let s = match s { @@ -1181,7 +1181,7 @@ impl<'tcx> Clean for ty::ImplOrTraitItem<'tcx> { } /// A trait reference, which may have higher ranked lifetimes. -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Show)] pub struct PolyTrait { pub trait_: Type, pub lifetimes: Vec @@ -1190,7 +1190,7 @@ pub struct PolyTrait { /// A representation of a Type suitable for hyperlinking purposes. Ideally one can get the original /// type out of the AST/ty::ctxt given one of these, if more information is needed. Most importantly /// it does not preserve mutability or boxes. 
-#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Show)] pub enum Type { /// structs/enums/traits (anything that'd be an ast::TyPath) ResolvedPath { @@ -1236,10 +1236,10 @@ pub enum Type { PolyTraitRef(Vec), } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Copy)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Copy, Show)] pub enum PrimitiveType { - Int, I8, I16, I32, I64, - Uint, U8, U16, U32, U64, + Isize, I8, I16, I32, I64, + Usize, U8, U16, U32, U64, F32, F64, Char, Bool, @@ -1264,12 +1264,12 @@ pub enum TypeKind { impl PrimitiveType { fn from_str(s: &str) -> Option { match s.as_slice() { - "int" => Some(Int), + "isize" | "int" => Some(Isize), "i8" => Some(I8), "i16" => Some(I16), "i32" => Some(I32), "i64" => Some(I64), - "uint" => Some(Uint), + "usize" | "uint" => Some(Usize), "u8" => Some(U8), "u16" => Some(U16), "u32" => Some(U32), @@ -1308,12 +1308,12 @@ impl PrimitiveType { pub fn to_string(&self) -> &'static str { match *self { - Int => "int", + Isize => "isize", I8 => "i8", I16 => "i16", I32 => "i32", I64 => "i64", - Uint => "uint", + Usize => "usize", U8 => "u8", U16 => "u16", U32 => "u32", @@ -1376,7 +1376,7 @@ impl Clean for ast::Ty { Infer }, TyTypeof(..) => { - panic!("Unimplemented type {}", self.node) + panic!("Unimplemented type {:?}", self.node) }, } } @@ -1387,12 +1387,12 @@ impl<'tcx> Clean for ty::Ty<'tcx> { match self.sty { ty::ty_bool => Primitive(Bool), ty::ty_char => Primitive(Char), - ty::ty_int(ast::TyI) => Primitive(Int), + ty::ty_int(ast::TyIs) => Primitive(Isize), ty::ty_int(ast::TyI8) => Primitive(I8), ty::ty_int(ast::TyI16) => Primitive(I16), ty::ty_int(ast::TyI32) => Primitive(I32), ty::ty_int(ast::TyI64) => Primitive(I64), - ty::ty_uint(ast::TyU) => Primitive(Uint), + ty::ty_uint(ast::TyUs) => Primitive(Usize), ty::ty_uint(ast::TyU8) => Primitive(U8), ty::ty_uint(ast::TyU16) => Primitive(U16), ty::ty_uint(ast::TyU32) => Primitive(U32), @@ -1746,7 +1746,7 @@ impl Clean for syntax::codemap::Span { } } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Show)] pub struct Path { pub global: bool, pub segments: Vec, @@ -1761,7 +1761,7 @@ impl Clean for ast::Path { } } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Show)] pub enum PathParameters { AngleBracketed { lifetimes: Vec, @@ -1793,7 +1793,7 @@ impl Clean for ast::PathParameters { } } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Show)] pub struct PathSegment { pub name: String, pub params: PathParameters @@ -1857,7 +1857,7 @@ impl Clean for doctree::Typedef { } } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Show)] pub struct BareFunctionDecl { pub unsafety: ast::Unsafety, pub generics: Generics, @@ -1892,7 +1892,7 @@ pub struct Static { impl Clean for doctree::Static { fn clean(&self, cx: &DocContext) -> Item { - debug!("claning static {}: {}", self.name.clean(cx), self); + debug!("cleaning static {}: {:?}", self.name.clean(cx), self); Item { name: Some(self.name.clean(cx)), attrs: self.attrs.clean(cx), @@ -2170,7 +2170,7 @@ trait ToSource { impl ToSource for syntax::codemap::Span { fn to_src(&self, cx: &DocContext) -> String { - debug!("converting span {} to snippet", self.clean(cx)); + 
debug!("converting span {:?} to snippet", self.clean(cx)); let sn = match cx.sess().codemap().span_to_snippet(*self) { Some(x) => x.to_string(), None => "".to_string() @@ -2183,7 +2183,7 @@ impl ToSource for syntax::codemap::Span { fn lit_to_string(lit: &ast::Lit) -> String { match lit.node { ast::LitStr(ref st, _) => st.get().to_string(), - ast::LitBinary(ref data) => format!("{}", data), + ast::LitBinary(ref data) => format!("{:?}", data), ast::LitByte(b) => { let mut res = String::from_str("b'"); for c in (b as char).escape_default() { @@ -2202,7 +2202,7 @@ fn lit_to_string(lit: &ast::Lit) -> String { fn name_from_pat(p: &ast::Pat) -> String { use syntax::ast::*; - debug!("Trying to get a name from pattern: {}", p); + debug!("Trying to get a name from pattern: {:?}", p); match p.node { PatWild(PatWildSingle) => "_".to_string(), @@ -2265,12 +2265,12 @@ fn resolve_type(cx: &DocContext, ast::TyStr => return Primitive(Str), ast::TyBool => return Primitive(Bool), ast::TyChar => return Primitive(Char), - ast::TyInt(ast::TyI) => return Primitive(Int), + ast::TyInt(ast::TyIs) => return Primitive(Isize), ast::TyInt(ast::TyI8) => return Primitive(I8), ast::TyInt(ast::TyI16) => return Primitive(I16), ast::TyInt(ast::TyI32) => return Primitive(I32), ast::TyInt(ast::TyI64) => return Primitive(I64), - ast::TyUint(ast::TyU) => return Primitive(Uint), + ast::TyUint(ast::TyUs) => return Primitive(Usize), ast::TyUint(ast::TyU8) => return Primitive(U8), ast::TyUint(ast::TyU16) => return Primitive(U16), ast::TyUint(ast::TyU32) => return Primitive(U32), diff --git a/src/librustdoc/core.rs b/src/librustdoc/core.rs index a69437f7a669c..46c212a9f2dbc 100644 --- a/src/librustdoc/core.rs +++ b/src/librustdoc/core.rs @@ -136,7 +136,7 @@ pub fn run_core(search_paths: SearchPaths, cfgs: Vec, externs: Externs, inlined: RefCell::new(Some(HashSet::new())), populated_crate_impls: RefCell::new(HashSet::new()), }; - debug!("crate: {}", ctxt.krate); + debug!("crate: {:?}", ctxt.krate); let analysis = CrateAnalysis { exported_items: exported_items, diff --git a/src/librustdoc/html/escape.rs b/src/librustdoc/html/escape.rs index b4afb67170bbd..99cd467cdfccc 100644 --- a/src/librustdoc/html/escape.rs +++ b/src/librustdoc/html/escape.rs @@ -19,7 +19,15 @@ use std::fmt; /// string when passed to a format string. pub struct Escape<'a>(pub &'a str); +//NOTE(stage0): remove impl after snapshot +#[cfg(stage0)] impl<'a> fmt::Show for Escape<'a> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::String::fmt(self, f) + } +} + +impl<'a> fmt::String for Escape<'a> { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { // Because the internet is always right, turns out there's not that many // characters to escape: http://stackoverflow.com/questions/7381974 diff --git a/src/librustdoc/html/format.rs b/src/librustdoc/html/format.rs index 9004d11b5bccf..b24e7a7a4cf81 100644 --- a/src/librustdoc/html/format.rs +++ b/src/librustdoc/html/format.rs @@ -10,7 +10,7 @@ //! HTML formatting module //! -//! This module contains a large number of `fmt::Show` implementations for +//! This module contains a large number of `fmt::String` implementations for //! various types in `rustdoc::clean`. These implementations all currently //! assume that HTML output is desired, although it may be possible to redesign //! them in the future to instead emit any format desired. 
@@ -64,7 +64,15 @@ impl UnsafetySpace { } } +//NOTE(stage0): remove impl after snapshot +#[cfg(stage0)] impl<'a> fmt::Show for TyParamBounds<'a> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::String::fmt(self, f) + } +} + +impl<'a> fmt::String for TyParamBounds<'a> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let &TyParamBounds(bounds) = self; for (i, bound) in bounds.iter().enumerate() { @@ -77,7 +85,15 @@ impl<'a> fmt::Show for TyParamBounds<'a> { } } -impl fmt::Show for clean::Generics { +//NOTE(stage0): remove impl after snapshot +#[cfg(stage0)] +impl fmt::Show for clean::Generics { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::String::fmt(self, f) + } +} + +impl fmt::String for clean::Generics { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { if self.lifetimes.len() == 0 && self.type_params.len() == 0 { return Ok(()) } try!(f.write_str("<")); @@ -97,7 +113,7 @@ impl fmt::Show for clean::Generics { if i > 0 { try!(f.write_str(", ")) } - try!(f.write_str(tp.name[])); + try!(f.write_str(tp.name.as_slice())); if tp.bounds.len() > 0 { try!(write!(f, ": {}", TyParamBounds(tp.bounds.as_slice()))); @@ -114,7 +130,15 @@ impl fmt::Show for clean::Generics { } } +//NOTE(stage0): remove impl after snapshot +#[cfg(stage0)] impl<'a> fmt::Show for WhereClause<'a> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::String::fmt(self, f) + } +} + +impl<'a> fmt::String for WhereClause<'a> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let &WhereClause(gens) = self; if gens.where_predicates.len() == 0 { @@ -151,14 +175,30 @@ impl<'a> fmt::Show for WhereClause<'a> { } } +//NOTE(stage0): remove impl after snapshot +#[cfg(stage0)] impl fmt::Show for clean::Lifetime { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::String::fmt(self, f) + } +} + +impl fmt::String for clean::Lifetime { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { try!(f.write_str(self.get_ref())); Ok(()) } } +//NOTE(stage0): remove impl after snapshot +#[cfg(stage0)] impl fmt::Show for clean::PolyTrait { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::String::fmt(self, f) + } +} + +impl fmt::String for clean::PolyTrait { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { if self.lifetimes.len() > 0 { try!(f.write_str("for<")); @@ -174,7 +214,15 @@ impl fmt::Show for clean::PolyTrait { } } +//NOTE(stage0): remove impl after snapshot +#[cfg(stage0)] impl fmt::Show for clean::TyParamBound { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::String::fmt(self, f) + } +} + +impl fmt::String for clean::TyParamBound { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { clean::RegionBound(ref lt) => { @@ -191,7 +239,15 @@ impl fmt::Show for clean::TyParamBound { } } +//NOTE(stage0): remove impl after snapshot +#[cfg(stage0)] impl fmt::Show for clean::PathParameters { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::String::fmt(self, f) + } +} + +impl fmt::String for clean::PathParameters { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { clean::PathParameters::AngleBracketed { ref lifetimes, ref types } => { @@ -236,14 +292,30 @@ impl fmt::Show for clean::PathParameters { } } +//NOTE(stage0): remove impl after snapshot +#[cfg(stage0)] impl fmt::Show for clean::PathSegment { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::String::fmt(self, f) + } +} + +impl fmt::String for clean::PathSegment { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
try!(f.write_str(self.name.as_slice())); write!(f, "{}", self.params) } } +//NOTE(stage0): remove impl after snapshot +#[cfg(stage0)] impl fmt::Show for clean::Path { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::String::fmt(self, f) + } +} + +impl fmt::String for clean::Path { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { if self.global { try!(f.write_str("::")) @@ -311,7 +383,7 @@ fn path(w: &mut fmt::Formatter, match rel_root { Some(root) => { let mut root = String::from_str(root.as_slice()); - for seg in path.segments[..amt].iter() { + for seg in path.segments.index(&(0..amt)).iter() { if "super" == seg.name || "self" == seg.name { try!(write!(w, "{}::", seg.name)); @@ -326,7 +398,7 @@ fn path(w: &mut fmt::Formatter, } } None => { - for seg in path.segments[..amt].iter() { + for seg in path.segments.index(&(0..amt)).iter() { try!(write!(w, "{}::", seg.name)); } } @@ -337,7 +409,7 @@ fn path(w: &mut fmt::Formatter, // This is a documented path, link to it! Some((ref fqp, shortty)) if abs_root.is_some() => { let mut url = String::from_str(abs_root.unwrap().as_slice()); - let to_link = fqp[..fqp.len() - 1]; + let to_link = &fqp[..(fqp.len() - 1)]; for component in to_link.iter() { url.push_str(component.as_slice()); url.push_str("/"); @@ -429,11 +501,19 @@ fn tybounds(w: &mut fmt::Formatter, } } +//NOTE(stage0): remove impl after snapshot +#[cfg(stage0)] impl fmt::Show for clean::Type { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::String::fmt(self, f) + } +} + +impl fmt::String for clean::Type { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { clean::TyParamBinder(id) => { - f.write_str(cache().typarams[ast_util::local_def(id)][]) + f.write_str(cache().typarams[ast_util::local_def(id)].as_slice()) } clean::Generic(ref name) => { f.write_str(name.as_slice()) @@ -570,7 +650,16 @@ impl fmt::Show for clean::Type { } } +//NOTE(stage0): remove impl after snapshot +#[cfg(stage0)] impl fmt::Show for clean::Arguments { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::String::fmt(self, f) + } +} + + +impl fmt::String for clean::Arguments { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { for (i, input) in self.values.iter().enumerate() { if i > 0 { try!(write!(f, ", ")); } @@ -583,7 +672,15 @@ impl fmt::Show for clean::Arguments { } } +//NOTE(stage0): remove impl after snapshot +#[cfg(stage0)] impl fmt::Show for clean::FunctionRetTy { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::String::fmt(self, f) + } +} + +impl fmt::String for clean::FunctionRetTy { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { clean::Return(clean::Tuple(ref tys)) if tys.is_empty() => Ok(()), @@ -593,13 +690,29 @@ impl fmt::Show for clean::FunctionRetTy { } } +//NOTE(stage0): remove impl after snapshot +#[cfg(stage0)] impl fmt::Show for clean::FnDecl { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::String::fmt(self, f) + } +} + +impl fmt::String for clean::FnDecl { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "({args}){arrow}", args = self.inputs, arrow = self.output) } } +//NOTE(stage0): remove impl after snapshot +#[cfg(stage0)] impl<'a> fmt::Show for Method<'a> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::String::fmt(self, f) + } +} + +impl<'a> fmt::String for Method<'a> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let Method(selfty, d) = *self; let mut args = String::new(); @@ -629,7 +742,15 @@ impl<'a> fmt::Show for Method<'a> { 
} } +//NOTE(stage0): remove impl after snapshot +#[cfg(stage0)] impl fmt::Show for VisSpace { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::String::fmt(self, f) + } +} + +impl fmt::String for VisSpace { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self.get() { Some(ast::Public) => write!(f, "pub "), @@ -638,7 +759,15 @@ impl fmt::Show for VisSpace { } } +//NOTE(stage0): remove impl after snapshot +#[cfg(stage0)] impl fmt::Show for UnsafetySpace { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::String::fmt(self, f) + } +} + +impl fmt::String for UnsafetySpace { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self.get() { ast::Unsafety::Unsafe => write!(f, "unsafe "), @@ -647,7 +776,15 @@ impl fmt::Show for UnsafetySpace { } } +//NOTE(stage0): remove impl after snapshot +#[cfg(stage0)] impl fmt::Show for clean::ViewPath { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::String::fmt(self, f) + } +} + +impl fmt::String for clean::ViewPath { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { clean::SimpleImport(ref name, ref src) => { @@ -674,7 +811,15 @@ impl fmt::Show for clean::ViewPath { } } +//NOTE(stage0): remove impl after snapshot +#[cfg(stage0)] impl fmt::Show for clean::ImportSource { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::String::fmt(self, f) + } +} + +impl fmt::String for clean::ImportSource { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self.did { Some(did) => resolved_path(f, did, &self.path, true), @@ -691,7 +836,15 @@ impl fmt::Show for clean::ImportSource { } } +//NOTE(stage0): remove impl after snapshot +#[cfg(stage0)] impl fmt::Show for clean::ViewListIdent { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::String::fmt(self, f) + } +} + +impl fmt::String for clean::ViewListIdent { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self.source { Some(did) => { @@ -712,7 +865,15 @@ impl fmt::Show for clean::ViewListIdent { } } +//NOTE(stage0): remove impl after snapshot +#[cfg(stage0)] impl fmt::Show for MutableSpace { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::String::fmt(self, f) + } +} + +impl fmt::String for MutableSpace { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { MutableSpace(clean::Immutable) => Ok(()), @@ -721,7 +882,15 @@ impl fmt::Show for MutableSpace { } } +//NOTE(stage0): remove impl after snapshot +#[cfg(stage0)] impl fmt::Show for RawMutableSpace { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::String::fmt(self, f) + } +} + +impl fmt::String for RawMutableSpace { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { RawMutableSpace(clean::Immutable) => write!(f, "const "), @@ -730,13 +899,21 @@ impl fmt::Show for RawMutableSpace { } } +//NOTE(stage0): remove impl after snapshot +#[cfg(stage0)] impl<'a> fmt::Show for Stability<'a> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::String::fmt(self, f) + } +} + +impl<'a> fmt::String for Stability<'a> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let Stability(stab) = *self; match *stab { Some(ref stability) => { write!(f, "{lvl}", - lvl = stability.level.to_string(), + lvl = stability.level, reason = stability.text) } None => Ok(()) @@ -744,13 +921,21 @@ impl<'a> fmt::Show for Stability<'a> { } } +//NOTE(stage0): remove impl after snapshot +#[cfg(stage0)] impl<'a> fmt::Show for ConciseStability<'a> { + fn fmt(&self, f: &mut fmt::Formatter) -> 
fmt::Result { + fmt::String::fmt(self, f) + } +} + +impl<'a> fmt::String for ConciseStability<'a> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let ConciseStability(stab) = *self; match *stab { Some(ref stability) => { write!(f, "", - lvl = stability.level.to_string(), + lvl = stability.level, colon = if stability.text.len() > 0 { ": " } else { "" }, reason = stability.text) } @@ -761,7 +946,15 @@ impl<'a> fmt::Show for ConciseStability<'a> { } } +//NOTE(stage0): remove impl after snapshot +#[cfg(stage0)] impl fmt::Show for ModuleSummary { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::String::fmt(self, f) + } +} + +impl fmt::String for ModuleSummary { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fn fmt_inner<'a>(f: &mut fmt::Formatter, context: &mut Vec<&'a str>, diff --git a/src/librustdoc/html/highlight.rs b/src/librustdoc/html/highlight.rs index 30b9d6c63c5bb..3d2c5e2cbb5fb 100644 --- a/src/librustdoc/html/highlight.rs +++ b/src/librustdoc/html/highlight.rs @@ -34,7 +34,7 @@ pub fn highlight(src: &str, class: Option<&str>, id: Option<&str>) -> String { class, id, &mut out).unwrap(); - String::from_utf8_lossy(out[]).into_owned() + String::from_utf8_lossy(out.index(&FullRange)).into_owned() } /// Exhausts the `lexer` writing the output into `out`. diff --git a/src/librustdoc/html/item_type.rs b/src/librustdoc/html/item_type.rs index 3efaf5d491442..13a06f842a276 100644 --- a/src/librustdoc/html/item_type.rs +++ b/src/librustdoc/html/item_type.rs @@ -103,7 +103,15 @@ impl ItemType { } } +//NOTE(stage0): remove impl after snapshot +#[cfg(stage0)] impl fmt::Show for ItemType { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::String::fmt(self, f) + } +} + +impl fmt::String for ItemType { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { self.to_static_str().fmt(f) } diff --git a/src/librustdoc/html/layout.rs b/src/librustdoc/html/layout.rs index d47c6010be0ba..f75ab3f431c2a 100644 --- a/src/librustdoc/html/layout.rs +++ b/src/librustdoc/html/layout.rs @@ -30,7 +30,7 @@ pub struct Page<'a> { pub keywords: &'a str } -pub fn render( +pub fn render( dst: &mut io::Writer, layout: &Layout, page: &Page, sidebar: &S, t: &T) -> io::IoResult<()> { diff --git a/src/librustdoc/html/markdown.rs b/src/librustdoc/html/markdown.rs index 3b9265cf56976..f4660a81be496 100644 --- a/src/librustdoc/html/markdown.rs +++ b/src/librustdoc/html/markdown.rs @@ -14,7 +14,7 @@ //! (bundled into the rust runtime). This module self-contains the C bindings //! and necessary legwork to render markdown, and exposes all of the //! functionality through a unit-struct, `Markdown`, which has an implementation -//! of `fmt::Show`. Example usage: +//! of `fmt::String`. Example usage: //! //! ```rust,ignore //! use rustdoc::html::markdown::Markdown; @@ -41,7 +41,7 @@ use html::highlight; use html::escape::Escape; use test; -/// A unit struct which has the `fmt::Show` trait implemented. When +/// A unit struct which has the `fmt::String` trait implemented. When /// formatted, this struct will emit the HTML corresponding to the rendered /// version of the contained markdown string. 
pub struct Markdown<'a>(pub &'a str); @@ -172,7 +172,7 @@ pub fn render(w: &mut fmt::Formatter, s: &str, print_toc: bool) -> fmt::Result { let text = slice::from_raw_buf(&(*orig_text).data, (*orig_text).size as uint); let origtext = str::from_utf8(text).unwrap(); - debug!("docblock: ==============\n{}\n=======", text); + debug!("docblock: ==============\n{:?}\n=======", text); let rendered = if lang.is_null() { false } else { @@ -435,7 +435,15 @@ pub fn reset_headers() { TEST_IDX.with(|s| s.set(0)); } +//NOTE(stage0): remove impl after snapshot +#[cfg(stage0)] impl<'a> fmt::Show for Markdown<'a> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::String::fmt(self, f) + } +} + +impl<'a> fmt::String for Markdown<'a> { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { let Markdown(md) = *self; // This is actually common enough to special-case @@ -444,7 +452,15 @@ impl<'a> fmt::Show for Markdown<'a> { } } +//NOTE(stage0): remove impl after snapshot +#[cfg(stage0)] impl<'a> fmt::Show for MarkdownWithToc<'a> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::String::fmt(self, f) + } +} + +impl<'a> fmt::String for MarkdownWithToc<'a> { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { let MarkdownWithToc(md) = *self; render(fmt, md.as_slice(), true) diff --git a/src/librustdoc/html/render.rs b/src/librustdoc/html/render.rs index ddb14d6944b9d..08abdc2af18dc 100644 --- a/src/librustdoc/html/render.rs +++ b/src/librustdoc/html/render.rs @@ -404,7 +404,7 @@ fn build_index(krate: &clean::Crate, cache: &mut Cache) -> io::IoResult search_index.push(IndexItem { ty: shortty(item), name: item.name.clone().unwrap(), - path: fqp[..fqp.len() - 1].connect("::"), + path: fqp[..(fqp.len() - 1)].connect("::"), desc: shorter(item.doc_value()).to_string(), parent: Some(did), }); @@ -559,7 +559,7 @@ fn write_shared(cx: &Context, }; let mut mydst = dst.clone(); - for part in remote_path[..remote_path.len() - 1].iter() { + for part in remote_path[..(remote_path.len() - 1)].iter() { mydst.push(part.as_slice()); try!(mkdir(&mydst)); } @@ -821,7 +821,7 @@ impl DocFolder for Cache { if let clean::ImplItem(ref i) = item.inner { match i.trait_ { Some(clean::ResolvedPath{ did, .. }) => { - let v = self.implementors.entry(&did).get().unwrap_or_else( + let v = self.implementors.entry(did).get().unwrap_or_else( |vacant_entry| vacant_entry.insert(Vec::with_capacity(1))); v.push(Implementor { def_id: item.def_id, @@ -842,7 +842,7 @@ impl DocFolder for Cache { clean::StructFieldItem(..) | clean::VariantItem(..) => { ((Some(*self.parent_stack.last().unwrap()), - Some(self.stack[..self.stack.len() - 1])), + Some(&self.stack[..(self.stack.len() - 1)])), false) } clean::MethodItem(..) => { @@ -853,13 +853,13 @@ impl DocFolder for Cache { let did = *last; let path = match self.paths.get(&did) { Some(&(_, ItemType::Trait)) => - Some(self.stack[..self.stack.len() - 1]), + Some(&self.stack[..(self.stack.len() - 1)]), // The current stack not necessarily has correlation for // where the type was defined. On the other hand, // `paths` always has the right information if present. Some(&(ref fqp, ItemType::Struct)) | Some(&(ref fqp, ItemType::Enum)) => - Some(fqp[..fqp.len() - 1]), + Some(&fqp[..(fqp.len() - 1)]), Some(..) 
=> Some(self.stack.as_slice()), None => None }; @@ -1011,7 +1011,7 @@ impl DocFolder for Cache { }; if let Some(did) = did { - let v = self.impls.entry(&did).get().unwrap_or_else( + let v = self.impls.entry(did).get().unwrap_or_else( |vacant_entry| vacant_entry.insert(Vec::with_capacity(1))); v.push(Impl { impl_: i, @@ -1051,7 +1051,7 @@ impl Context { F: FnOnce(&mut Context) -> T, { if s.len() == 0 { - panic!("Unexpected empty destination: {}", self.current); + panic!("Unexpected empty destination: {:?}", self.current); } let prev = self.dst.clone(); self.dst.push(s.as_slice()); @@ -1185,7 +1185,7 @@ impl Context { .collect::(); match cache().paths.get(&it.def_id) { Some(&(ref names, _)) => { - for name in names[..names.len() - 1].iter() { + for name in (&names[..(names.len() - 1)]).iter() { url.push_str(name.as_slice()); url.push_str("/"); } @@ -1260,7 +1260,7 @@ impl Context { Some(ref s) => s.to_string(), }; let short = short.to_string(); - let v = map.entry(&short).get().unwrap_or_else( + let v = map.entry(short).get().unwrap_or_else( |vacant_entry| vacant_entry.insert(Vec::with_capacity(1))); v.push(myname); } @@ -1351,8 +1351,15 @@ impl<'a> Item<'a> { } - +//NOTE(stage0): remove impl after snapshot +#[cfg(stage0)] impl<'a> fmt::Show for Item<'a> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::String::fmt(self, f) + } +} + +impl<'a> fmt::String for Item<'a> { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { // Write the breadcrumb trail header for the top try!(write!(fmt, "\n

")); @@ -1542,7 +1549,7 @@ fn item_module(w: &mut fmt::Formatter, cx: &Context, indices.sort_by(|&i1, &i2| cmp(&items[i1], &items[i2], i1, i2)); - debug!("{}", indices); + debug!("{:?}", indices); let mut curty = None; for &idx in indices.iter() { let myitem = &items[idx]; @@ -1626,7 +1633,16 @@ fn item_module(w: &mut fmt::Formatter, cx: &Context, } struct Initializer<'a>(&'a str); + +//NOTE(stage0): remove impl after snapshot +#[cfg(stage0)] impl<'a> fmt::Show for Initializer<'a> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::String::fmt(self, f) + } +} + +impl<'a> fmt::String for Initializer<'a> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let Initializer(s) = *self; if s.len() == 0 { return Ok(()); } @@ -2127,7 +2143,7 @@ fn render_impl(w: &mut fmt::Formatter, i: &Impl) -> fmt::Result { try!(assoc_type(w, item, typaram)); try!(write!(w, "

\n")); } - _ => panic!("can't make docs for trait item with name {}", item.name) + _ => panic!("can't make docs for trait item with name {:?}", item.name) } match item.doc_value() { Some(s) if dox => { @@ -2188,7 +2204,15 @@ fn item_typedef(w: &mut fmt::Formatter, it: &clean::Item, document(w, it) } +//NOTE(stage0): remove impl after snapshot +#[cfg(stage0)] impl<'a> fmt::Show for Sidebar<'a> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::String::fmt(self, f) + } +} + +impl<'a> fmt::String for Sidebar<'a> { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { let cx = self.cx; let it = self.item; @@ -2243,7 +2267,15 @@ impl<'a> fmt::Show for Sidebar<'a> { } } +//NOTE(stage0): remove impl after snapshot +#[cfg(stage0)] impl<'a> fmt::Show for Source<'a> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::String::fmt(self, f) + } +} + +impl<'a> fmt::String for Source<'a> { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { let Source(s) = *self; let lines = s.lines().count(); @@ -2267,7 +2299,7 @@ fn item_macro(w: &mut fmt::Formatter, it: &clean::Item, t: &clean::Macro) -> fmt::Result { try!(w.write_str(highlight::highlight(t.source.as_slice(), Some("macro"), - None)[])); + None).as_slice())); document(w, it) } diff --git a/src/librustdoc/html/toc.rs b/src/librustdoc/html/toc.rs index 71313ea90b8ae..8d94e1857c417 100644 --- a/src/librustdoc/html/toc.rs +++ b/src/librustdoc/html/toc.rs @@ -177,6 +177,12 @@ impl TocBuilder { } impl fmt::Show for Toc { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::String::fmt(self, f) + } +} + +impl fmt::String for Toc { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { try!(write!(fmt, "