Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Hanabi1224 upgrade deps #1770

Open
wants to merge 7 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 3 additions & 3 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -35,14 +35,14 @@ byteorder = "1.3.4"
cpu-time = "1.0.0"
criterion = "0.3.2"
dialoguer = "0.10.0"
fdlimit = "0.2.0"
fdlimit = "0.3"
ff = "0.13.0"
fil_logger = "0.1.6"
generic-array = "0.14.4"
gperftools = "0.2"
hex = "0.4.2"
humansize = "1.1.0"
itertools = "0.10.3"
itertools = "0.13"
lazy_static = "1.2"
log = "0.4.7"
memmap2 = "0.5.6"
Expand All @@ -58,5 +58,5 @@ serde_json = "1.0"
sha2 = "0.10.2"
structopt = "0.3.12"
tempfile = "3"
thiserror = "1.0.6"
thiserror = "2"
typenum = "1.11.2"
2 changes: 1 addition & 1 deletion fil-proofs-param/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ lazy_static.workspace = true
log.workspace = true
pbr = "1.0"
rand.workspace = true
reqwest = { version = "0.11.10", default-features = false, features = ["blocking", "native-tls-vendored"] }
reqwest = { version = "0.12", default-features = false, features = ["blocking", "native-tls-vendored"] }
serde_json.workspace = true
structopt.workspace = true
tar = "0.4.26"
Expand Down
5 changes: 2 additions & 3 deletions fil-proofs-param/src/bin/paramfetch.rs
Original file line number Diff line number Diff line change
Expand Up @@ -51,9 +51,8 @@ struct FetchProgress<R> {

impl<R: Read> Read for FetchProgress<R> {
fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
self.reader.read(buf).map(|n| {
self.progress_bar.add(n as u64);
n
self.reader.read(buf).inspect(|n| {
self.progress_bar.add(*n as u64);
})
}
}
Expand Down
2 changes: 1 addition & 1 deletion fil-proofs-param/tests/paramfetch/session.rs
Original file line number Diff line number Diff line change
Expand Up @@ -77,7 +77,7 @@ impl ParamFetchSessionBuilder {
s.push_str(&wl.join(","));
s
})
.unwrap_or_else(|| "".to_string());
.unwrap_or_default();

let json_argument = if self.manifest.is_some() {
format!("--json={:?}", self.manifest.expect("missing manifest"))
Expand Down
6 changes: 3 additions & 3 deletions fil-proofs-tooling/src/bin/benchy/porep.rs
Original file line number Diff line number Diff line change
Expand Up @@ -94,7 +94,7 @@ fn run_pre_commit_phases<Tree: 'static + MerkleTreeTrait>(
OpenOptions::new().read(true).write(true).open(&staged_file_path)
} else {
info!("*** Creating staged file");
OpenOptions::new().read(true).write(true).create(true).open(&staged_file_path)
OpenOptions::new().read(true).write(true).create(true).truncate(true).open(&staged_file_path)
}?;

let sealed_file_path = cache_dir.join(SEALED_FILE);
Expand All @@ -103,7 +103,7 @@ fn run_pre_commit_phases<Tree: 'static + MerkleTreeTrait>(
OpenOptions::new().read(true).write(true).open(&sealed_file_path)
} else {
info!("*** Creating sealed file");
OpenOptions::new().read(true).write(true).create(true).open(&sealed_file_path)
OpenOptions::new().read(true).write(true).create(true).truncate(true).open(&sealed_file_path)
}?;

let sector_size_unpadded_bytes_amount =
Expand All @@ -120,7 +120,7 @@ fn run_pre_commit_phases<Tree: 'static + MerkleTreeTrait>(
.collect();

info!("*** Created piece file");
let mut piece_file = OpenOptions::new().read(true).write(true).create(true).open(&piece_file_path)?;
let mut piece_file = OpenOptions::new().read(true).write(true).create(true).truncate(true).open(&piece_file_path)?;
piece_file.write_all(&piece_bytes)?;
piece_file.sync_all()?;
piece_file.rewind()?;
Expand Down
6 changes: 3 additions & 3 deletions fil-proofs-tooling/src/bin/benchy/window_post.rs
Original file line number Diff line number Diff line change
Expand Up @@ -101,7 +101,7 @@ fn run_pre_commit_phases<Tree: 'static + MerkleTreeTrait>(
OpenOptions::new().read(true).write(true).open(&staged_file_path)
} else {
info!("*** Creating staged file");
OpenOptions::new().read(true).write(true).create(true).open(&staged_file_path)
OpenOptions::new().read(true).write(true).create(true).truncate(true).open(&staged_file_path)
}?;

let sealed_file_path = cache_dir.join(SEALED_FILE);
Expand All @@ -110,7 +110,7 @@ fn run_pre_commit_phases<Tree: 'static + MerkleTreeTrait>(
OpenOptions::new().read(true).write(true).open(&sealed_file_path)
} else {
info!("*** Creating sealed file");
OpenOptions::new().read(true).write(true).create(true).open(&sealed_file_path)
OpenOptions::new().read(true).write(true).create(true).truncate(true).open(&sealed_file_path)
}?;

let sector_size_unpadded_bytes_amount =
Expand All @@ -128,7 +128,7 @@ fn run_pre_commit_phases<Tree: 'static + MerkleTreeTrait>(
.collect();

info!("*** Created piece file");
let mut piece_file = OpenOptions::new().read(true).write(true).create(true).open(&piece_file_path)?;
let mut piece_file = OpenOptions::new().read(true).write(true).create(true).truncate(true).open(&piece_file_path)?;
piece_file.write_all(&piece_bytes)?;
piece_file.sync_all()?;
piece_file.rewind()?;
Expand Down
4 changes: 2 additions & 2 deletions fil-proofs-tooling/src/bin/benchy/winning_post.rs
Original file line number Diff line number Diff line change
Expand Up @@ -63,8 +63,8 @@ pub fn run_fallback_post_bench<Tree: 'static + MerkleTreeTrait>(
create_replica::<Tree>(sector_size, fake_replica, api_version, api_features);

// Store the replica's private and publicly facing info for proving and verifying respectively.
let pub_replica_info = vec![(sector_id, replica_output.public_replica_info.clone())];
let priv_replica_info = vec![(sector_id, replica_output.private_replica_info.clone())];
let pub_replica_info = [(sector_id, replica_output.public_replica_info.clone())];
let priv_replica_info = [(sector_id, replica_output.private_replica_info.clone())];

let post_config = PoStConfig {
sector_size: sector_size.into(),
Expand Down
13 changes: 11 additions & 2 deletions fil-proofs-tooling/src/bin/fdlimit/main.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,15 @@
/// Attempt to raise this process's file descriptor limit and print the
/// outcome.
///
/// With fdlimit 0.3 `raise_fd_limit` returns `Ok(Outcome)` rather than the
/// raw new limit, so both the unsupported-platform case and the syscall
/// error case must be handled explicitly.
///
/// Panics if the platform does not support raising the limit or if the
/// underlying call fails.
fn main() {
    fil_logger::init();

    match fdlimit::raise_fd_limit() {
        Ok(fdlimit::Outcome::LimitRaised { from, to }) => {
            println!("File descriptor limit was raised from {from} to {to}");
        }
        Ok(fdlimit::Outcome::Unsupported) => {
            panic!("failed to raise fd limit: unsupported")
        }
        Err(e) => {
            panic!("failed to raise fd limit: {}", e)
        }
    }
}
2 changes: 1 addition & 1 deletion fil-proofs-tooling/src/bin/gpu-cpu-test/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -104,7 +104,7 @@ fn thread_fun(
) -> RunInfo {
let timing = Instant::now();
let mut iteration = 0;
while iteration < std::u8::MAX {
while iteration < u8::MAX {
info!("iter {}", iteration);

// This is the higher priority proof, get it on the GPU even if there is one running
Expand Down
2 changes: 1 addition & 1 deletion fil-proofs-tooling/src/shared.rs
Original file line number Diff line number Diff line change
Expand Up @@ -298,7 +298,7 @@ pub fn create_replicas<Tree: 'static + MerkleTreeTrait>(
let priv_infos = sealed_files
.iter()
.zip(seal_pre_commit_outputs.return_value.iter())
.zip(cache_dirs.into_iter())
.zip(cache_dirs)
.map(|((sealed_file, seal_pre_commit_output), cache_dir)| {
PrivateReplicaInfo::new(
sealed_file.to_path_buf(),
Expand Down
2 changes: 1 addition & 1 deletion filecoin-proofs/src/api/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -367,7 +367,7 @@ where
/// # Arguments
///
/// * `source` - a readable source of unprocessed piece bytes. The piece's commitment will be
/// generated for the bytes read from the source plus any added padding.
/// generated for the bytes read from the source plus any added padding.
/// * `piece_size` - the number of unpadded user-bytes which can be read from source before EOF.
pub fn generate_piece_commitment<T: Read>(
source: T,
Expand Down
2 changes: 1 addition & 1 deletion filecoin-proofs/src/chunk_iter.rs
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ impl<R: Read> Iterator for ChunkIterator<R> {
match self.reader.read_many(&mut buffer) {
Ok(bytes_read) if bytes_read == self.chunk_size => Some(Ok(buffer)),
// A position of 0 indicates end of file.
Ok(bytes_read) if bytes_read == 0 => None,
Ok(0) => None,
Ok(bytes_read) => Some(Ok(buffer[..bytes_read].to_vec())),
Err(error) => Some(Err(error)),
}
Expand Down
2 changes: 1 addition & 1 deletion filecoin-proofs/src/types/private_replica_info.rs
Original file line number Diff line number Diff line change
Expand Up @@ -78,7 +78,7 @@ impl<Tree: MerkleTreeTrait> Ord for PrivateReplicaInfo<Tree> {

impl<Tree: MerkleTreeTrait> PartialOrd for PrivateReplicaInfo<Tree> {
    /// Delegates to the `Ord` implementation so that `partial_cmp` and
    /// `cmp` can never disagree (fixes clippy's
    /// `incorrect_partial_cmp_impl_on_ord_type` lint; the old body compared
    /// `comm_r` directly, duplicating the ordering logic).
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

Expand Down
10 changes: 7 additions & 3 deletions filecoin-proofs/tests/api.rs
Original file line number Diff line number Diff line change
Expand Up @@ -653,7 +653,7 @@ fn test_seal_lifecycle_64gib_porep_id_v1_2_ni_top_8_8_2_api_v1_2() -> Result<()>

#[cfg(feature = "big-tests")]
#[test]
fn test_seal_lifecycle_upgrade_64gib_top_8_8_2_v1_1() -> Result<()> {
fn test_seal_lifecycle_upgrade_64gib_top_8_8_2_v1_2() -> Result<()> {
let porep_config = PoRepConfig::new_groth16(
SECTOR_SIZE_64_GIB,
ARBITRARY_POREP_ID_V1_2_0,
Expand Down Expand Up @@ -1454,15 +1454,15 @@ fn winning_post<Tree: 'static + MerkleTreeTrait>(
assert_eq!(challenged_sectors.len(), sector_count);
assert_eq!(challenged_sectors[0], 0); // with a sector_count of 1, the only valid index is 0

let pub_replicas = vec![(sector_id, PublicReplicaInfo::new(comm_r)?)];
let pub_replicas = [(sector_id, PublicReplicaInfo::new(comm_r)?)];
let private_replica_info =
PrivateReplicaInfo::new(replica.path().into(), comm_r, cache_dir.path().into())?;

/////////////////////////////////////////////
// The following methods of proof generation are functionally equivalent:
// 1)
//
let priv_replicas = vec![(sector_id, private_replica_info.clone())];
let priv_replicas = [(sector_id, private_replica_info.clone())];
let proof = generate_winning_post::<Tree>(&config, &randomness, &priv_replicas[..], prover_id)?;

let valid =
Expand Down Expand Up @@ -2629,6 +2629,7 @@ fn create_seal_for_upgrade<R: Rng, Tree: 'static + MerkleTreeTrait<Hasher = Tree
.read(true)
.write(true)
.create(true)
.truncate(true)
.open(new_sealed_sector_file.path())
.with_context(|| format!("could not open path={:?}", new_sealed_sector_file.path()))?;
f_sealed_sector.set_len(new_replica_target_len)?;
Expand Down Expand Up @@ -2734,6 +2735,7 @@ fn create_seal_for_upgrade<R: Rng, Tree: 'static + MerkleTreeTrait<Hasher = Tree
.read(true)
.write(true)
.create(true)
.truncate(true)
.open(decoded_sector_file.path())
.with_context(|| format!("could not open path={:?}", decoded_sector_file.path()))?;
f_decoded_sector.set_len(decoded_sector_target_len)?;
Expand Down Expand Up @@ -2780,6 +2782,7 @@ fn create_seal_for_upgrade<R: Rng, Tree: 'static + MerkleTreeTrait<Hasher = Tree
.read(true)
.write(true)
.create(true)
.truncate(true)
.open(remove_encoded_file.path())
.with_context(|| format!("could not open path={:?}", remove_encoded_file.path()))?;
f_remove_encoded.set_len(remove_encoded_target_len)?;
Expand Down Expand Up @@ -2895,6 +2898,7 @@ fn create_seal_for_upgrade_aggregation<
.read(true)
.write(true)
.create(true)
.truncate(true)
.open(new_sealed_sector_file.path())
.with_context(|| format!("could not open path={:?}", new_sealed_sector_file.path()))?;
f_sealed_sector.set_len(new_replica_target_len)?;
Expand Down
4 changes: 2 additions & 2 deletions filecoin-proofs/tests/pieces.rs
Original file line number Diff line number Diff line change
Expand Up @@ -266,14 +266,14 @@ fn test_verify_padded_pieces() {
hash(&layer1[10], &layer1[11]), // 4
];

let layer3 = vec![
let layer3 = [
hash(&layer2[0], &layer2[1]), // 8
hash(&layer2[2], &layer2[3]), // 8
layer2[4], // 8
hash(&layer2[5], &layer2[6]), // 8
];

let layer4 = vec![
let layer4 = [
hash(&layer3[0], &layer3[1]), // 16
hash(&layer3[2], &layer3[3]), // 16
];
Expand Down
2 changes: 1 addition & 1 deletion fr32/src/convert.rs
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ pub enum Error {
/// Invariants:
/// - Value of each 32-byte chunks MUST represent valid Frs.
/// - Total length must be a multiple of 32.
/// That is to say: each 32-byte chunk taken alone must be a valid Fr32.
/// That is to say: each 32-byte chunk taken alone must be a valid Fr32.
pub type Fr32Vec = Vec<u8>;

/// Array whose little-endian value represents an Fr.
Expand Down
10 changes: 5 additions & 5 deletions fr32/src/padding.rs
Original file line number Diff line number Diff line change
Expand Up @@ -539,7 +539,7 @@ fn clear_right_bits(byte: &mut u8, offset: usize) {
*(byte) &= !((1 << offset) - 1)
}

/** Padding process.
/* Padding process.

Read a `source` of raw byte-aligned data, pad it in a bit stream and
write a byte-aligned version of it in the `target`. The `target` needs
Expand All @@ -563,14 +563,14 @@ need to handle the potential bit-level misalignments:
// offset and num_bytes are based on the unpadded data, so
// if [0, 1, ..., 255] was the original unpadded data, offset 3 and len 4 would return
// [3, 4, 5, 6].
pub fn write_unpadded<W: ?Sized>(
pub fn write_unpadded<W>(
source: &[u8],
target: &mut W,
offset: usize,
len: usize,
) -> io::Result<usize>
where
W: Write,
W: Write + Sized,
{
// Check that there's actually `len` raw data bytes encoded inside
// `source` starting at `offset`.
Expand Down Expand Up @@ -630,15 +630,15 @@ The reader will generally operate with bit precision, even if the padded
layout is byte-aligned (no extra bits) the data inside it isn't (since
we pad at the bit-level).
**/
fn write_unpadded_aux<W: ?Sized>(
fn write_unpadded_aux<W>(
padding_map: &PaddingMap,
source: &[u8],
target: &mut W,
write_pos: usize,
max_write_size: usize,
) -> io::Result<usize>
where
W: Write,
W: Write + Sized,
{
// Position of the reader in the padded bit stream layout, deduced from
// the position of the writer (`write_pos`) in the raw data layout.
Expand Down
2 changes: 1 addition & 1 deletion rust-toolchain
Original file line number Diff line number Diff line change
@@ -1 +1 @@
1.70.0
1.83.0
2 changes: 1 addition & 1 deletion storage-proofs-core/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ blake2b_simd.workspace = true
blstrs.workspace = true
byteorder.workspace = true
cbc = { version = "0.1.2", features = ["std"] }
config = { version = "0.12.0", default-features = false, features = ["toml"] }
config = { version = "0.14", default-features = false, features = ["toml"] }
cpu-time = { workspace = true, optional = true }
ff.workspace = true
fs2 = "0.4"
Expand Down
2 changes: 1 addition & 1 deletion storage-proofs-core/benches/blake2s.rs
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ struct Blake2sExample<'a> {
data: &'a [Option<bool>],
}

impl<'a> Circuit<Fr> for Blake2sExample<'a> {
impl Circuit<Fr> for Blake2sExample<'_> {
fn synthesize<CS: ConstraintSystem<Fr>>(self, cs: &mut CS) -> Result<(), SynthesisError> {
let data: Vec<Boolean> = self
.data
Expand Down
2 changes: 1 addition & 1 deletion storage-proofs-core/benches/sha256.rs
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ struct Sha256Example<'a> {
data: &'a [Option<bool>],
}

impl<'a> Circuit<Fr> for Sha256Example<'a> {
impl Circuit<Fr> for Sha256Example<'_> {
fn synthesize<CS: ConstraintSystem<Fr>>(self, cs: &mut CS) -> Result<(), SynthesisError> {
let data: Vec<Boolean> = self
.data
Expand Down
2 changes: 1 addition & 1 deletion storage-proofs-core/benches/xor.rs
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ struct XorExample<'a> {
data: &'a [Option<bool>],
}

impl<'a> Circuit<Fr> for XorExample<'a> {
impl Circuit<Fr> for XorExample<'_> {
fn synthesize<CS: ConstraintSystem<Fr>>(self, cs: &mut CS) -> Result<(), SynthesisError> {
let key: Vec<Boolean> = self
.key
Expand Down
10 changes: 5 additions & 5 deletions storage-proofs-core/src/data.rs
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ enum RawData<'a> {
Mmap(MmapMut),
}

impl<'a> Deref for RawData<'a> {
impl Deref for RawData<'_> {
type Target = [u8];

fn deref(&self) -> &Self::Target {
Expand All @@ -32,7 +32,7 @@ impl<'a> Deref for RawData<'a> {
}
}

impl<'a> DerefMut for RawData<'a> {
impl DerefMut for RawData<'_> {
fn deref_mut(&mut self) -> &mut Self::Target {
match self {
RawData::Slice(ref mut raw) => raw,
Expand All @@ -52,7 +52,7 @@ impl<'a> From<&'a mut [u8]> for Data<'a> {
}
}

impl<'a> From<(MmapMut, PathBuf)> for Data<'a> {
impl From<(MmapMut, PathBuf)> for Data<'_> {
fn from(raw: (MmapMut, PathBuf)) -> Self {
let len = raw.0.len();
Data {
Expand All @@ -63,7 +63,7 @@ impl<'a> From<(MmapMut, PathBuf)> for Data<'a> {
}
}

impl<'a> AsRef<[u8]> for Data<'a> {
impl AsRef<[u8]> for Data<'_> {
fn as_ref(&self) -> &[u8] {
match self.raw {
Some(ref raw) => raw,
Expand All @@ -72,7 +72,7 @@ impl<'a> AsRef<[u8]> for Data<'a> {
}
}

impl<'a> AsMut<[u8]> for Data<'a> {
impl AsMut<[u8]> for Data<'_> {
fn as_mut(&mut self) -> &mut [u8] {
match self.raw {
Some(ref mut raw) => raw,
Expand Down
Loading
Loading