Update MSRV to Rust 1.79 (rerun-io#7563)
### What
* Required for rerun-io#7298

### TODO
* [x] Fix the debug-assertion
* [x] Fix all new clippy lints
* [x] Add more clippy lints that were added in 1.77, 1.78, and 1.79 (see the sketch after this list for the kind of rewrites they ask for)
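
For illustration, a minimal sketch (not code from this repository) of the kind of rewrites two of the newly enabled lints, `iter_filter_is_some` and `manual_is_variant_and`, ask for:

```rust
fn main() {
    let values = [Some(1), None, Some(3)];

    // `clippy::iter_filter_is_some`: prefer `.flatten()` over `.filter(Option::is_some)`.
    // `&Option<T>` implements `IntoIterator`, so `flatten()` simply skips the `None`s.
    let count = values.iter().flatten().count();
    assert_eq!(count, 2);

    // `clippy::manual_is_variant_and`: prefer `.is_some_and(..)`
    // over `.map(..).unwrap_or_default()`.
    let maybe: Option<i32> = Some(4);
    let is_even = maybe.is_some_and(|x| x % 2 == 0);
    assert!(is_even);
}
```

The `crates/store/re_chunk_store/src/events.rs` hunk further down is the in-tree counterpart of the first rewrite.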


### MiMalloc bug
The standard library now asserts some of the safety preconditions of its `unsafe` functions whenever debug assertions are enabled:
https://blog.rust-lang.org/2024/05/02/Rust-1.78.0.html#asserting-unsafe-preconditions
This makes us hit this bug in MiMalloc:

* purpleprotocol/mimalloc_rust#128

I also opened an issue against the standard library:
* rust-lang/rust#131189
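
As background, here is a minimal, self-contained sketch (unrelated to MiMalloc and not rerun code) of what the new stdlib checks do: on Rust 1.78+ with debug assertions enabled, violating a documented safety requirement of a standard-library `unsafe` function aborts with an "unsafe precondition(s) violated" message instead of being silent undefined behavior.

```rust
fn main() {
    // A 4-byte-aligned buffer; offsetting its address by one byte is
    // guaranteed to produce a pointer that is misaligned for `u32`.
    let buf = [0u32; 2];
    let misaligned = unsafe { buf.as_ptr().cast::<u8>().add(1) }.cast::<u32>();

    // `slice::from_raw_parts` requires an aligned, non-null pointer.
    // On Rust 1.78+ with debug assertions enabled this aborts at runtime
    // with "unsafe precondition(s) violated"; previously (and in release
    // builds) it was silent undefined behavior.
    let _slice = unsafe { std::slice::from_raw_parts(misaligned, 1) };
}
```

This is the mechanism that surfaces the MiMalloc bug linked above in our debug builds.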

### Checklist
* [x] I have read and agree to [Contributor
Guide](https://github.com/rerun-io/rerun/blob/main/CONTRIBUTING.md) and
the [Code of
Conduct](https://github.com/rerun-io/rerun/blob/main/CODE_OF_CONDUCT.md)
* [x] I've included a screenshot or gif (if applicable)
* [x] I have tested the web demo (if applicable):
* Using examples from latest `main` build:
[rerun.io/viewer](https://rerun.io/viewer/pr/7563?manifest_url=https://app.rerun.io/version/main/examples_manifest.json)
* Using full set of examples from `nightly` build:
[rerun.io/viewer](https://rerun.io/viewer/pr/7563?manifest_url=https://app.rerun.io/version/nightly/examples_manifest.json)
* [x] The PR title and labels are set such as to maximize their
usefulness for the next release's CHANGELOG
* [x] If applicable, add a new check to the [release
checklist](https://github.com/rerun-io/rerun/blob/main/tests/python/release_checklist)!
* [x] I have noted any breaking changes to the log API in
`CHANGELOG.md` and the migration guide

- [PR Build Summary](https://build.rerun.io/pr/7563)
- [Recent benchmark results](https://build.rerun.io/graphs/crates.html)
- [Wasm size tracking](https://build.rerun.io/graphs/sizes.html)

To run all checks from `main`, comment on the PR with `@rerun-bot
full-check`.

---------

Co-authored-by: Andreas Reich <[email protected]>
emilk and Wumpf authored Oct 3, 2024
1 parent 3b7d555 commit 86f461e
Showing 94 changed files with 248 additions and 218 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/nightly.yml
@@ -59,7 +59,7 @@ jobs:
- uses: actions/checkout@v4
- uses: dtolnay/rust-toolchain@master
with:
toolchain: 1.76.0
toolchain: 1.79.0

- run: cargo build -p rerun

4 changes: 2 additions & 2 deletions BUILD.md
@@ -23,12 +23,12 @@ cd rerun

Now install the `pixi` package manager: <https://github.com/prefix-dev/pixi?tab=readme-ov-file#installation>

Make sure `cargo --version` prints `1.76.0` once you are done.
Make sure `cargo --version` prints `1.79.0` once you are done.

If you are using an Apple-silicon Mac (M1, M2), make sure `rustc -vV` outputs `host: aarch64-apple-darwin`. If not, this should fix it:

```sh
rustup set default-host aarch64-apple-darwin && rustup install 1.76.0
rustup set default-host aarch64-apple-darwin && rustup install 1.79.0
```

## Building and running the Viewer
4 changes: 2 additions & 2 deletions Cargo.lock
@@ -953,9 +953,9 @@ checksum = "e1e5f035d16fc623ae5f74981db80a439803888314e3a555fd6f04acd51a3205"

[[package]]
name = "bytemuck"
version = "1.13.1"
version = "1.18.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "17febce684fd15d89027105661fec94afb475cb995fbc59d2865198446ba2eea"
checksum = "94bbb0ad554ad961ddc5da507a12a29b14e4ae5bda06b19f575a3e6079d2e2ae"
dependencies = [
"bytemuck_derive",
]
16 changes: 13 additions & 3 deletions Cargo.toml
@@ -24,7 +24,7 @@ homepage = "https://rerun.io"
include = ["../../LICENSE-APACHE", "../../LICENSE-MIT", "**/*.rs", "Cargo.toml"]
license = "MIT OR Apache-2.0"
repository = "https://github.com/rerun-io/rerun"
rust-version = "1.76"
rust-version = "1.79"
version = "0.19.0-alpha.1+dev"

[workspace.dependencies]
@@ -148,7 +148,7 @@ bincode = "1.3"
bit-vec = "0.7"
bitflags = { version = "2.4", features = ["bytemuck"] }
blackbox = "0.2.0"
bytemuck = { version = "1.11", features = ["extern_crate_alloc"] }
bytemuck = { version = "1.18", features = ["extern_crate_alloc"] }
camino = "1.1"
cargo_metadata = "0.18"
cargo-run-wasm = "0.3.2"
@@ -385,6 +385,7 @@ disallowed_types = "warn" # See clippy.toml
doc_link_with_quotes = "warn"
doc_markdown = "warn"
empty_enum = "warn"
empty_enum_variants_with_brackets = "warn"
enum_glob_use = "warn"
equatable_if_let = "warn"
exit = "warn"
@@ -408,6 +409,8 @@ inefficient_to_string = "warn"
infinite_loop = "warn"
into_iter_without_iter = "warn"
invalid_upcast_comparisons = "warn"
iter_filter_is_ok = "warn"
iter_filter_is_some = "warn"
iter_not_returning_iterator = "warn"
iter_on_empty_collections = "warn"
iter_on_single_items = "warn"
@@ -424,6 +427,7 @@ macro_use_imports = "warn"
manual_assert = "warn"
manual_clamp = "warn"
manual_instant_elapsed = "warn"
manual_is_variant_and = "warn"
manual_let_else = "warn"
manual_ok_or = "warn"
manual_string_new = "warn"
@@ -438,6 +442,7 @@ mismatched_target_os = "warn"
mismatching_type_param_order = "warn"
missing_enforced_import_renames = "warn"
missing_safety_doc = "warn"
mixed_attributes_style = "warn"
mut_mut = "warn"
mutex_integer = "warn"
needless_borrow = "warn"
@@ -447,24 +452,28 @@ needless_pass_by_ref_mut = "warn"
needless_pass_by_value = "warn"
negative_feature_names = "warn"
nonstandard_macro_braces = "warn"
option_as_ref_cloned = "warn"
option_option = "warn"
path_buf_push_overwrite = "warn"
ptr_as_ptr = "warn"
ptr_cast_constness = "warn"
pub_underscore_fields = "warn"
pub_without_shorthand = "warn"
rc_mutex = "warn"
readonly_write_lock = "warn"
redundant_type_annotations = "warn"
ref_as_ptr = "warn"
ref_option_ref = "warn"
rest_pat_in_fully_bound_structs = "warn"
same_functions_in_if_condition = "warn"
semicolon_if_nothing_returned = "warn"
should_panic_without_expect = "warn"
significant_drop_tightening = "warn"
single_match_else = "warn"
str_split_at_newline = "warn"
str_to_string = "warn"
string_add_assign = "warn"
string_add = "warn"
string_add_assign = "warn"
string_lit_as_bytes = "warn"
string_lit_chars_any = "warn"
string_to_string = "warn"
@@ -498,6 +507,7 @@ zero_sized_map_values = "warn"
# Disabled waiting on https://github.com/rust-lang/rust-clippy/issues/9602
#self_named_module_files = "warn"

assigning_clones = "allow" # Too much for too little
manual_range_contains = "allow" # this one is just worse imho
map_unwrap_or = "allow" # so is this one
ref_patterns = "allow" # It's nice to avoid ref pattern, but there are some situations that are hard (impossible?) to express without.
5 changes: 4 additions & 1 deletion clippy.toml
@@ -3,7 +3,7 @@
# -----------------------------------------------------------------------------
# Section identical to the main scripts/clippy_wasm/clippy.toml:

msrv = "1.76"
msrv = "1.79"

allow-unwrap-in-tests = true

@@ -77,13 +77,16 @@ doc-valid-idents = [
"GLTF",
"iOS",
"macOS",
"MessagePack",
"MiMalloc",
"NaN",
"OBJ",
"OpenGL",
"PyPI",
"sRGB",
"sRGBA",
"WebGL",
"WebGPU",
"WebSocket",
"WebSockets",
]
1 change: 1 addition & 0 deletions crates/build/re_dev_tools/src/build_examples/example.rs
@@ -129,6 +129,7 @@ impl ExamplesManifest {
#[derive(serde::Deserialize)]
pub struct ExampleCategory {
/// Used to sort categories in the `rerun.io/examples` navbar.
#[allow(unused)]
pub order: u64,

/// `snake_case` name.
2 changes: 2 additions & 0 deletions crates/build/re_dev_tools/src/build_search_index/util.rs
@@ -20,11 +20,13 @@ pub trait CommandExt {
I: IntoIterator<Item = S>,
S: AsRef<OsStr>;

#[allow(unused)]
fn with_env<K, V>(self, key: K, val: V) -> Self
where
K: AsRef<OsStr>,
V: AsRef<OsStr>;

#[allow(unused)]
fn run(self) -> io::Result<()>;

fn output(self) -> anyhow::Result<Vec<u8>>;
2 changes: 1 addition & 1 deletion crates/build/re_types_builder/src/codegen/common.rs
@@ -17,7 +17,7 @@ pub struct ExampleInfo<'a> {
/// Path to the snippet relative to the snippet directory.
pub path: &'a str,

/// The snake_case name of the example.
/// The `snake_case` name of the example.
pub name: String,

/// The human-readable name of the example.
10 changes: 3 additions & 7 deletions crates/build/re_types_builder/src/codegen/python/mod.rs
@@ -185,10 +185,10 @@ struct ExtensionClass {
/// a default implementation.
has_array: bool,

/// Whether the `ObjectExt` contains __native_to_pa_array__()
/// Whether the `ObjectExt` contains `__native_to_pa_array__()`
has_native_to_pa_array: bool,

/// Whether the `ObjectExt` contains a deferred_patch_class() method
/// Whether the `ObjectExt` contains a `deferred_patch_class()` method
has_deferred_patch_class: bool,
}

@@ -1509,11 +1509,7 @@ fn quote_union_aliases_from_object<'a>(
let name = &obj.name;

let union_fields = field_types.join(",");
let aliases = if let Some(aliases) = aliases {
aliases
} else {
String::new()
};
let aliases = aliases.unwrap_or_default();

unindent(&format!(
r#"
6 changes: 3 additions & 3 deletions crates/build/re_types_builder/src/objects.rs
@@ -284,7 +284,7 @@ impl ObjectKind {
/// an enum.
#[derive(Debug, Clone)]
pub struct Object {
/// Utf8Path of the associated fbs definition in the Flatbuffers hierarchy, e.g. `//rerun/components/point2d.fbs`.
/// `Utf8Path` of the associated fbs definition in the Flatbuffers hierarchy, e.g. `//rerun/components/point2d.fbs`.
pub virtpath: String,

/// Absolute filepath of the associated fbs definition.
@@ -296,7 +296,7 @@ pub struct Object {
/// Fully-qualified package name of the object, e.g. `rerun.components`.
pub pkg_name: String,

/// PascalCase name of the object type, e.g. `Position2D`.
/// `PascalCase` name of the object type, e.g. `Position2D`.
pub name: String,

/// The object's multiple layers of documentation.
@@ -650,7 +650,7 @@ pub enum ObjectClass {
/// union value.
#[derive(Debug, Clone)]
pub struct ObjectField {
/// Utf8Path of the associated fbs definition in the Flatbuffers hierarchy, e.g. `//rerun/components/point2d.fbs`.
/// `Utf8Path` of the associated fbs definition in the Flatbuffers hierarchy, e.g. `//rerun/components/point2d.fbs`.
pub virtpath: String,

/// Absolute filepath of the associated fbs definition.
6 changes: 2 additions & 4 deletions crates/store/re_chunk/src/transport.rs
@@ -246,8 +246,7 @@ impl TransportChunk {
pub fn is_sorted(&self) -> bool {
self.schema
.metadata
.get(Self::CHUNK_METADATA_MARKER_IS_SORTED_BY_ROW_ID)
.is_some()
.contains_key(Self::CHUNK_METADATA_MARKER_IS_SORTED_BY_ROW_ID)
}

/// Iterates all columns of the specified `kind`.
Expand Down Expand Up @@ -531,8 +530,7 @@ impl Chunk {

let is_sorted = field
.metadata
.get(TransportChunk::FIELD_METADATA_MARKER_IS_SORTED_BY_TIME)
.is_some();
.contains_key(TransportChunk::FIELD_METADATA_MARKER_IS_SORTED_BY_TIME);

let time_column = TimeColumn::new(
is_sorted.then_some(true),
2 changes: 1 addition & 1 deletion crates/store/re_chunk/tests/memory_test.rs
@@ -6,7 +6,7 @@
use std::sync::atomic::{AtomicUsize, Ordering::Relaxed};

thread_local! {
static LIVE_BYTES_IN_THREAD: AtomicUsize = AtomicUsize::new(0);
static LIVE_BYTES_IN_THREAD: AtomicUsize = const { AtomicUsize::new(0) };
}

pub struct TrackingAllocator {
6 changes: 3 additions & 3 deletions crates/store/re_chunk_store/src/dataframe.rs
@@ -818,7 +818,7 @@ pub struct QueryExpression2 {
/// Only rows where at least 1 of the view-contents contains non-null data within that range will be kept in
/// the final dataset.
///
/// This is ignored if [QueryExpression2::`sampled_index_values`] is set.
/// This is ignored if [`QueryExpression2::sampled_index_values`] is set.
///
/// Example: `ResolvedTimeRange(10, 20)`.
pub filtered_index_range: Option<IndexRange>,
@@ -830,7 +830,7 @@
/// Only rows where at least 1 column contains non-null data at these specific values will be kept
/// in the final dataset.
///
/// This is ignored if [QueryExpression2::`sampled_index_values`] is set.
/// This is ignored if [`QueryExpression2::sampled_index_values`] is set.
///
/// Example: `[TimeInt(12), TimeInt(14)]`.
pub filtered_index_values: Option<BTreeSet<IndexValue>>,
@@ -844,7 +844,7 @@
///
/// The order of the samples will be respected in the final result.
///
/// If [QueryExpression2::`sampled_index_values`] is set, it overrides both [`QueryExpression2::filtered_index_range`]
/// If [`QueryExpression2::sampled_index_values`] is set, it overrides both [`QueryExpression2::filtered_index_range`]
/// and [`QueryExpression2::filtered_index_values`].
///
/// Example: `[TimeInt(12), TimeInt(14)]`.
3 changes: 1 addition & 2 deletions crates/store/re_chunk_store/src/events.rs
@@ -249,8 +249,7 @@ mod tests {
.or_default() += delta_chunks;

for (component_name, list_array) in event.chunk.components() {
let delta =
event.delta() * list_array.iter().filter(Option::is_some).count() as i64;
let delta = event.delta() * list_array.iter().flatten().count() as i64;
*self.component_names.entry(*component_name).or_default() += delta;
}

2 changes: 1 addition & 1 deletion crates/store/re_chunk_store/tests/memory_test.rs
@@ -11,7 +11,7 @@ use std::sync::{
static LIVE_BYTES_GLOBAL: AtomicUsize = AtomicUsize::new(0);

thread_local! {
static LIVE_BYTES_IN_THREAD: AtomicUsize = AtomicUsize::new(0);
static LIVE_BYTES_IN_THREAD: AtomicUsize = const { AtomicUsize::new(0) };
}

pub struct TrackingAllocator {
13 changes: 7 additions & 6 deletions crates/store/re_data_loader/src/loader_rrd.rs
@@ -1,5 +1,3 @@
use std::{io::Read, sync::mpsc::Receiver};

use re_log_encoding::decoder::Decoder;

// ---
@@ -144,15 +142,16 @@ fn decode_and_stream<R: std::io::Read>(

// Retryable file reader that keeps retrying to read more data despite
// reading zero bytes or reaching EOF.
#[cfg(not(target_arch = "wasm32"))]
struct RetryableFileReader {
reader: std::io::BufReader<std::fs::File>,
rx: Receiver<notify::Result<notify::Event>>,
rx: std::sync::mpsc::Receiver<notify::Result<notify::Event>>,
#[allow(dead_code)]
watcher: notify::RecommendedWatcher,
}

#[cfg(not(target_arch = "wasm32"))]
impl RetryableFileReader {
#[cfg(not(target_arch = "wasm32"))]
fn new(filepath: &std::path::Path) -> Result<Self, crate::DataLoaderError> {
use anyhow::Context as _;
use notify::{RecursiveMode, Watcher};
@@ -177,7 +176,8 @@ impl RetryableFileReader {
}
}

impl Read for RetryableFileReader {
#[cfg(not(target_arch = "wasm32"))]
impl std::io::Read for RetryableFileReader {
fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
loop {
match self.reader.read(buf) {
@@ -194,6 +194,7 @@ impl Read for RetryableFileReader {
}
}

#[cfg(not(target_arch = "wasm32"))]
impl RetryableFileReader {
fn block_until_file_changes(&self) -> std::io::Result<usize> {
#[allow(clippy::disallowed_methods)]
@@ -241,7 +242,7 @@ mod tests {
};
std::fs::remove_file(&rrd_file_path).ok(); // Remove the file just in case a previous test crashes hard.
let rrd_file = std::fs::OpenOptions::new()
.create(true)
.create_new(true)
.write(true)
.open(rrd_file_path.to_str().unwrap())
.unwrap();
2 changes: 1 addition & 1 deletion crates/store/re_dataframe/src/latest_at.rs
@@ -161,7 +161,7 @@ impl LatestAtQueryHandle<'_> {

// If the query didn't return anything at all, we just want a properly empty Recordbatch with
// the right schema.
let null_array_length = max_time_per_timeline.get(&self.query.timeline).is_some() as usize;
let null_array_length = max_time_per_timeline.contains_key(&self.query.timeline) as usize;

// NOTE: Keep in mind this must match the ordering specified by `Self::schema`.
let packed_arrays = {
2 changes: 1 addition & 1 deletion crates/store/re_entity_db/examples/memory_usage.rs
@@ -4,7 +4,7 @@
use std::sync::atomic::{AtomicUsize, Ordering::Relaxed};

thread_local! {
static LIVE_BYTES_IN_THREAD: AtomicUsize = AtomicUsize::new(0);
static LIVE_BYTES_IN_THREAD: AtomicUsize = const { AtomicUsize::new(0) };
}

struct TrackingAllocator {
5 changes: 1 addition & 4 deletions crates/store/re_log_encoding/src/file_sink.rs
@@ -170,10 +170,7 @@ fn spawn_and_stream<W: std::io::Write + Send + 'static>(
impl fmt::Debug for FileSink {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("FileSink")
.field(
"path",
&self.path.as_ref().cloned().unwrap_or("stdout".into()),
)
.field("path", &self.path.clone().unwrap_or("stdout".into()))
.finish_non_exhaustive()
}
}