6 changes: 3 additions & 3 deletions chia/_tests/core/full_node/test_address_manager.py
@@ -560,7 +560,7 @@ async def check_retrieved_peers(self, wanted_peers: list[ExtendedPeerInfo], addr
     # use tmp_path pytest fixture to create a temporary directory
     async def test_serialization(self, tmp_path: Path):
         addrman = AddressManagerTest()
-        now = int(math.floor(time.time()))
+        now = math.floor(time.time())
         t_peer1 = TimestampedPeerInfo("250.7.1.1", uint16(8333), uint64(now - 10000))
         t_peer2 = TimestampedPeerInfo("1050:0000:0000:0000:0005:0600:300c:326b", uint16(9999), uint64(now - 20000))
         t_peer3 = TimestampedPeerInfo("250.7.3.3", uint16(9999), uint64(now - 30000))
@@ -587,7 +587,7 @@ async def test_serialization(self, tmp_path: Path):
     @pytest.mark.anyio
     async def test_bad_ip_encoding(self, tmp_path: Path):
         addrman = AddressManagerTest()
-        now = int(math.floor(time.time()))
+        now = math.floor(time.time())
         t_peer1 = TimestampedPeerInfo("250.7.1.1", uint16(8333), uint64(now - 10000))
         t_peer2 = TimestampedPeerInfo("1050:0000:0000:0000:0005:0600:300c:326b", uint16(9999), uint64(now - 20000))
         t_peer3 = TimestampedPeerInfo("250.7.3.3", uint16(9999), uint64(now - 30000))
@@ -725,7 +725,7 @@ async def old_serialize(address_manager: AddressManager, peers_file_path: Path)

         # create a file with the old serialization, then migrate to new serialization
         addrman = AddressManagerTest()
-        now = int(math.floor(time.time()))
+        now = math.floor(time.time())
         t_peer1 = TimestampedPeerInfo("250.7.1.1", uint16(8333), uint64(now - 10000))
         t_peer2 = TimestampedPeerInfo("1050:0000:0000:0000:0005:0600:300c:326b", uint16(9999), uint64(now - 20000))
         t_peer3 = TimestampedPeerInfo("250.7.3.3", uint16(9999), uint64(now - 30000))
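Note on the int(math.floor(...)) removals above: in Python 3, math.floor() already returns an int when given a float, so the int() wrapper is exactly the redundant cast that RUF046 (unnecessary-cast-to-int) flags, and the value handed to uint64() is unchanged. A minimal standalone sketch of the equivalence, not part of the diff itself:

import math
import time

t = time.time()                              # float seconds since the epoch
assert isinstance(math.floor(t), int)        # math.floor() already yields an int
assert math.floor(t) == int(math.floor(t))   # the extra int() changes nothing
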
10 changes: 5 additions & 5 deletions chia/plotting/check_plots.py
@@ -170,9 +170,9 @@ def process_plot(plot_path: Path, plot_info: PlotInfo, num_start: int, num_end:
         challenge = std_hash(i.to_bytes(32, "big"))
         # Some plot errors cause get_qualities_for_challenge to throw a RuntimeError
         try:
-            quality_start_time = int(round(time() * 1000))
+            quality_start_time = round(time() * 1000)
             for index, quality_str in enumerate(pr.get_qualities_for_challenge(challenge)):
-                quality_spent_time = int(round(time() * 1000)) - quality_start_time
+                quality_spent_time = round(time() * 1000) - quality_start_time
                 if quality_spent_time > 8000:
                     log.warning(
                         f"\tLooking up qualities took: {quality_spent_time} ms. This should be below 8 seconds "
@@ -183,9 +183,9 @@ def process_plot(plot_path: Path, plot_info: PlotInfo, num_start: int, num_end:

                 # Other plot errors cause get_full_proof or validate_proof to throw an AssertionError
                 try:
-                    proof_start_time = int(round(time() * 1000))
+                    proof_start_time = round(time() * 1000)
                     proof = pr.get_full_proof(challenge, index, parallel_read)
-                    proof_spent_time = int(round(time() * 1000)) - proof_start_time
+                    proof_spent_time = round(time() * 1000) - proof_start_time
                     if proof_spent_time > 15000:
                         log.warning(
                             f"\tFinding proof took: {proof_spent_time} ms. This should be below 15 seconds "
@@ -207,7 +207,7 @@ def process_plot(plot_path: Path, plot_info: PlotInfo, num_start: int, num_end:
                 f"{type(e)}: {e} error in proving/verifying for plot {plot_path}. Filepath: {plot_path}"
             )
             caught_exception = True
-            quality_start_time = int(round(time() * 1000))
+            quality_start_time = round(time() * 1000)
         except KeyboardInterrupt:
             log.warning("Interrupted, closing")
             return
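Note on the timing changes above: round() called without an ndigits argument also returns an int in Python 3, so int(round(time() * 1000)) is an unnecessary cast and the millisecond arithmetic and the 8000/15000 ms thresholds behave exactly as before. A minimal standalone sketch of the pattern, not taken from the module itself:

from time import time

start_ms = round(time() * 1000)              # already an int, no int() needed
# ... the work being timed would run here ...
spent_ms = round(time() * 1000) - start_ms   # int minus int stays an int
assert isinstance(start_ms, int) and isinstance(spent_ms, int)
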
6 changes: 3 additions & 3 deletions chia/server/address_manager.py
@@ -177,7 +177,7 @@ def get_bucket_position(self, key: int, is_new: bool, nBucket: int) -> int:

     def is_terrible(self, now: Optional[int] = None) -> bool:
         if now is None:
-            now = int(math.floor(time.time()))
+            now = math.floor(time.time())
         # never remove things tried in the last minute
         if self.last_try > 0 and self.last_try >= now - 60:
             return False
@@ -202,7 +202,7 @@ def is_terrible(self, now: Optional[int] = None) -> bool:

     def get_selection_chance(self, now: Optional[int] = None) -> float:
         if now is None:
-            now = int(math.floor(time.time()))
+            now = math.floor(time.time())
         chance = 1.0
         since_last_try = max(now - self.last_try, 0)
         # deprioritize very recent attempts away
@@ -734,7 +734,7 @@ def get_peers_(self) -> list[TimestampedPeerInfo]:
         return addr

     def cleanup(self, max_timestamp_difference: int, max_consecutive_failures: int) -> None:
-        now = int(math.floor(time.time()))
+        now = math.floor(time.time())
         for bucket in range(NEW_BUCKET_COUNT):
             for pos in range(BUCKET_SIZE):
                 if self.new_matrix[bucket][pos] != -1:
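Note on the address_manager.py changes: the Optional[int] now parameters are unchanged, only the redundant cast around math.floor() is dropped, so callers that pass an explicit timestamp and callers that rely on the default still see plain int epoch seconds. A minimal sketch of the default-timestamp pattern these methods use (hypothetical class, not the real AddressManager):

import math
import time
from typing import Optional

class PeerSketch:
    last_try: int = 0

    def tried_recently(self, now: Optional[int] = None) -> bool:
        if now is None:
            now = math.floor(time.time())  # already an int; int() would be redundant
        return self.last_try > 0 and self.last_try >= now - 60
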
1 change: 0 additions & 1 deletion ruff.toml
@@ -90,7 +90,6 @@ ignore = [
     "RUF056", # falsy-dict-get-fallback
     # Should probably fix this
     "RUF029", # unused-async
-    "RUF046", # unnecessary-cast-to-int
     "RUF052", # used-dummy-variable

     # Security linter
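Note on the ruff.toml change: dropping "RUF046" from the ignore list turns the unnecessary-cast-to-int rule on for the whole repo, which is what required the int() removals in the Python files above. Assuming a reasonably recent ruff release, running ruff check --select RUF046 should reproduce the findings locally, and the rule offers an autofix via ruff check --fix (worth verifying against the version pinned in this repo).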