Auto-fix clippy::collapsible_if violations (#36428)

Release Notes:

- N/A
This commit is contained in:
Piotr Osiewicz 2025-08-19 15:27:24 +02:00 committed by GitHub
parent 9e8ec72bd5
commit 8f567383e4
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
281 changed files with 6628 additions and 7089 deletions

View file

@@ -149,35 +149,35 @@ pub async fn post_crash(
"crash report"
);
if let Some(kinesis_client) = app.kinesis_client.clone() {
if let Some(stream) = app.config.kinesis_stream.clone() {
let properties = json!({
"app_version": report.header.app_version,
"os_version": report.header.os_version,
"os_name": "macOS",
"bundle_id": report.header.bundle_id,
"incident_id": report.header.incident_id,
"installation_id": installation_id,
"description": description,
"backtrace": summary,
});
let row = SnowflakeRow::new(
"Crash Reported",
None,
false,
Some(installation_id),
properties,
);
let data = serde_json::to_vec(&row)?;
kinesis_client
.put_record()
.stream_name(stream)
.partition_key(row.insert_id.unwrap_or_default())
.data(data.into())
.send()
.await
.log_err();
}
if let Some(kinesis_client) = app.kinesis_client.clone()
&& let Some(stream) = app.config.kinesis_stream.clone()
{
let properties = json!({
"app_version": report.header.app_version,
"os_version": report.header.os_version,
"os_name": "macOS",
"bundle_id": report.header.bundle_id,
"incident_id": report.header.incident_id,
"installation_id": installation_id,
"description": description,
"backtrace": summary,
});
let row = SnowflakeRow::new(
"Crash Reported",
None,
false,
Some(installation_id),
properties,
);
let data = serde_json::to_vec(&row)?;
kinesis_client
.put_record()
.stream_name(stream)
.partition_key(row.insert_id.unwrap_or_default())
.data(data.into())
.send()
.await
.log_err();
}
if let Some(slack_panics_webhook) = app.config.slack_panics_webhook.clone() {
@@ -359,34 +359,34 @@ pub async fn post_panic(
"panic report"
);
if let Some(kinesis_client) = app.kinesis_client.clone() {
if let Some(stream) = app.config.kinesis_stream.clone() {
let properties = json!({
"app_version": panic.app_version,
"os_name": panic.os_name,
"os_version": panic.os_version,
"incident_id": incident_id,
"installation_id": panic.installation_id,
"description": panic.payload,
"backtrace": backtrace,
});
let row = SnowflakeRow::new(
"Panic Reported",
None,
false,
panic.installation_id.clone(),
properties,
);
let data = serde_json::to_vec(&row)?;
kinesis_client
.put_record()
.stream_name(stream)
.partition_key(row.insert_id.unwrap_or_default())
.data(data.into())
.send()
.await
.log_err();
}
if let Some(kinesis_client) = app.kinesis_client.clone()
&& let Some(stream) = app.config.kinesis_stream.clone()
{
let properties = json!({
"app_version": panic.app_version,
"os_name": panic.os_name,
"os_version": panic.os_version,
"incident_id": incident_id,
"installation_id": panic.installation_id,
"description": panic.payload,
"backtrace": backtrace,
});
let row = SnowflakeRow::new(
"Panic Reported",
None,
false,
panic.installation_id.clone(),
properties,
);
let data = serde_json::to_vec(&row)?;
kinesis_client
.put_record()
.stream_name(stream)
.partition_key(row.insert_id.unwrap_or_default())
.data(data.into())
.send()
.await
.log_err();
}
if !report_to_slack(&panic) {
@@ -518,31 +518,31 @@ pub async fn post_events(
let first_event_at = chrono::Utc::now()
- chrono::Duration::milliseconds(last_event.milliseconds_since_first_event);
if let Some(kinesis_client) = app.kinesis_client.clone() {
if let Some(stream) = app.config.kinesis_stream.clone() {
let mut request = kinesis_client.put_records().stream_name(stream);
let mut has_records = false;
for row in for_snowflake(
request_body.clone(),
first_event_at,
country_code.clone(),
checksum_matched,
) {
if let Some(data) = serde_json::to_vec(&row).log_err() {
request = request.records(
aws_sdk_kinesis::types::PutRecordsRequestEntry::builder()
.partition_key(request_body.system_id.clone().unwrap_or_default())
.data(data.into())
.build()
.unwrap(),
);
has_records = true;
}
}
if has_records {
request.send().await.log_err();
if let Some(kinesis_client) = app.kinesis_client.clone()
&& let Some(stream) = app.config.kinesis_stream.clone()
{
let mut request = kinesis_client.put_records().stream_name(stream);
let mut has_records = false;
for row in for_snowflake(
request_body.clone(),
first_event_at,
country_code.clone(),
checksum_matched,
) {
if let Some(data) = serde_json::to_vec(&row).log_err() {
request = request.records(
aws_sdk_kinesis::types::PutRecordsRequestEntry::builder()
.partition_key(request_body.system_id.clone().unwrap_or_default())
.data(data.into())
.build()
.unwrap(),
);
has_records = true;
}
}
if has_records {
request.send().await.log_err();
}
};
Ok(())

View file

@@ -337,8 +337,7 @@ async fn fetch_extensions_from_blob_store(
if known_versions
.binary_search_by_key(&published_version, |known_version| known_version)
.is_err()
{
if let Some(extension) = fetch_extension_manifest(
&& let Some(extension) = fetch_extension_manifest(
blob_store_client,
blob_store_bucket,
extension_id,
@@ -346,12 +345,11 @@ async fn fetch_extensions_from_blob_store(
)
.await
.log_err()
{
new_versions
.entry(extension_id)
.or_default()
.push(extension);
}
{
new_versions
.entry(extension_id)
.or_default()
.push(extension);
}
}
}

View file

@@ -79,27 +79,27 @@ pub async fn validate_header<B>(mut req: Request<B>, next: Next<B>) -> impl Into
verify_access_token(access_token, user_id, &state.db).await
};
if let Ok(validate_result) = validate_result {
if validate_result.is_valid {
let user = state
.db
.get_user_by_id(user_id)
.await?
.with_context(|| format!("user {user_id} not found"))?;
if let Ok(validate_result) = validate_result
&& validate_result.is_valid
{
let user = state
.db
.get_user_by_id(user_id)
.await?
.with_context(|| format!("user {user_id} not found"))?;
if let Some(impersonator_id) = validate_result.impersonator_id {
let admin = state
.db
.get_user_by_id(impersonator_id)
.await?
.with_context(|| format!("user {impersonator_id} not found"))?;
req.extensions_mut()
.insert(Principal::Impersonated { user, admin });
} else {
req.extensions_mut().insert(Principal::User(user));
};
return Ok::<_, Error>(next.run(req).await);
}
if let Some(impersonator_id) = validate_result.impersonator_id {
let admin = state
.db
.get_user_by_id(impersonator_id)
.await?
.with_context(|| format!("user {impersonator_id} not found"))?;
req.extensions_mut()
.insert(Principal::Impersonated { user, admin });
} else {
req.extensions_mut().insert(Principal::User(user));
};
return Ok::<_, Error>(next.run(req).await);
}
Err(Error::http(

View file

@@ -87,10 +87,10 @@ impl Database {
continue;
};
if let Some((_, max_extension_version)) = &max_versions.get(&version.extension_id) {
if max_extension_version > &extension_version {
continue;
}
if let Some((_, max_extension_version)) = &max_versions.get(&version.extension_id)
&& max_extension_version > &extension_version
{
continue;
}
if let Some(constraints) = constraints {
@@ -331,10 +331,10 @@ impl Database {
.exec_without_returning(&*tx)
.await?;
if let Ok(db_version) = semver::Version::parse(&extension.latest_version) {
if db_version >= latest_version.version {
continue;
}
if let Ok(db_version) = semver::Version::parse(&extension.latest_version)
&& db_version >= latest_version.version
{
continue;
}
let mut extension = extension.into_active_model();

View file

@@ -1321,10 +1321,10 @@ impl Database {
.await?;
let mut connection_ids = HashSet::default();
if let Some(host_connection) = project.host_connection().log_err() {
if !exclude_dev_server {
connection_ids.insert(host_connection);
}
if let Some(host_connection) = project.host_connection().log_err()
&& !exclude_dev_server
{
connection_ids.insert(host_connection);
}
while let Some(collaborator) = collaborators.next().await {

View file

@@ -616,10 +616,10 @@ impl Server {
}
}
if let Some(live_kit) = livekit_client.as_ref() {
if delete_livekit_room {
live_kit.delete_room(livekit_room).await.trace_err();
}
if let Some(live_kit) = livekit_client.as_ref()
&& delete_livekit_room
{
live_kit.delete_room(livekit_room).await.trace_err();
}
}
}
@@ -1015,47 +1015,47 @@ impl Server {
inviter_id: UserId,
invitee_id: UserId,
) -> Result<()> {
if let Some(user) = self.app_state.db.get_user_by_id(inviter_id).await? {
if let Some(code) = &user.invite_code {
let pool = self.connection_pool.lock();
let invitee_contact = contact_for_user(invitee_id, false, &pool);
for connection_id in pool.user_connection_ids(inviter_id) {
self.peer.send(
connection_id,
proto::UpdateContacts {
contacts: vec![invitee_contact.clone()],
..Default::default()
},
)?;
self.peer.send(
connection_id,
proto::UpdateInviteInfo {
url: format!("{}{}", self.app_state.config.invite_link_prefix, &code),
count: user.invite_count as u32,
},
)?;
}
if let Some(user) = self.app_state.db.get_user_by_id(inviter_id).await?
&& let Some(code) = &user.invite_code
{
let pool = self.connection_pool.lock();
let invitee_contact = contact_for_user(invitee_id, false, &pool);
for connection_id in pool.user_connection_ids(inviter_id) {
self.peer.send(
connection_id,
proto::UpdateContacts {
contacts: vec![invitee_contact.clone()],
..Default::default()
},
)?;
self.peer.send(
connection_id,
proto::UpdateInviteInfo {
url: format!("{}{}", self.app_state.config.invite_link_prefix, &code),
count: user.invite_count as u32,
},
)?;
}
}
Ok(())
}
pub async fn invite_count_updated(self: &Arc<Self>, user_id: UserId) -> Result<()> {
if let Some(user) = self.app_state.db.get_user_by_id(user_id).await? {
if let Some(invite_code) = &user.invite_code {
let pool = self.connection_pool.lock();
for connection_id in pool.user_connection_ids(user_id) {
self.peer.send(
connection_id,
proto::UpdateInviteInfo {
url: format!(
"{}{}",
self.app_state.config.invite_link_prefix, invite_code
),
count: user.invite_count as u32,
},
)?;
}
if let Some(user) = self.app_state.db.get_user_by_id(user_id).await?
&& let Some(invite_code) = &user.invite_code
{
let pool = self.connection_pool.lock();
for connection_id in pool.user_connection_ids(user_id) {
self.peer.send(
connection_id,
proto::UpdateInviteInfo {
url: format!(
"{}{}",
self.app_state.config.invite_link_prefix, invite_code
),
count: user.invite_count as u32,
},
)?;
}
}
Ok(())
@@ -1101,10 +1101,10 @@ fn broadcast<F>(
F: FnMut(ConnectionId) -> anyhow::Result<()>,
{
for receiver_id in receiver_ids {
if Some(receiver_id) != sender_id {
if let Err(error) = f(receiver_id) {
tracing::error!("failed to send to {:?} {}", receiver_id, error);
}
if Some(receiver_id) != sender_id
&& let Err(error) = f(receiver_id)
{
tracing::error!("failed to send to {:?} {}", receiver_id, error);
}
}
}
@@ -2294,11 +2294,10 @@ async fn update_language_server(
let db = session.db().await;
if let Some(proto::update_language_server::Variant::MetadataUpdated(update)) = &request.variant
&& let Some(capabilities) = update.capabilities.clone()
{
if let Some(capabilities) = update.capabilities.clone() {
db.update_server_capabilities(project_id, request.language_server_id, capabilities)
.await?;
}
db.update_server_capabilities(project_id, request.language_server_id, capabilities)
.await?;
}
let project_connection_ids = db

View file

@@ -1162,8 +1162,8 @@ impl RandomizedTest for ProjectCollaborationTest {
Some((project, cx))
});
if !guest_project.is_disconnected(cx) {
if let Some((host_project, host_cx)) = host_project {
if !guest_project.is_disconnected(cx)
&& let Some((host_project, host_cx)) = host_project {
let host_worktree_snapshots =
host_project.read_with(host_cx, |host_project, cx| {
host_project
@@ -1235,7 +1235,6 @@ impl RandomizedTest for ProjectCollaborationTest {
);
}
}
}
for buffer in guest_project.opened_buffers(cx) {
let buffer = buffer.read(cx);

View file

@@ -198,11 +198,11 @@ pub async fn run_randomized_test<T: RandomizedTest>(
}
pub fn save_randomized_test_plan() {
if let Some(serialize_plan) = LAST_PLAN.lock().take() {
if let Some(path) = plan_save_path() {
eprintln!("saved test plan to path {:?}", path);
std::fs::write(path, serialize_plan()).unwrap();
}
if let Some(serialize_plan) = LAST_PLAN.lock().take()
&& let Some(path) = plan_save_path()
{
eprintln!("saved test plan to path {:?}", path);
std::fs::write(path, serialize_plan()).unwrap();
}
}
@@ -290,10 +290,9 @@ impl<T: RandomizedTest> TestPlan<T> {
if let StoredOperation::Client {
user_id, batch_id, ..
} = operation
&& batch_id == current_batch_id
{
if batch_id == current_batch_id {
return Some(user_id);
}
return Some(user_id);
}
None
}));
@@ -366,10 +365,9 @@ impl<T: RandomizedTest> TestPlan<T> {
},
applied,
) = stored_operation
&& user_id == &current_user_id
{
if user_id == &current_user_id {
return Some((operation.clone(), applied.clone()));
}
return Some((operation.clone(), applied.clone()));
}
}
None
@@ -550,11 +548,11 @@ impl<T: RandomizedTest> TestPlan<T> {
.unwrap();
let pool = server.connection_pool.lock();
for contact in contacts {
if let db::Contact::Accepted { user_id, busy, .. } = contact {
if user_id == removed_user_id {
assert!(!pool.is_user_online(user_id));
assert!(!busy);
}
if let db::Contact::Accepted { user_id, busy, .. } = contact
&& user_id == removed_user_id
{
assert!(!pool.is_user_online(user_id));
assert!(!busy);
}
}
}

View file

@@ -130,17 +130,17 @@ impl UserBackfiller {
.and_then(|value| value.parse::<i64>().ok())
.and_then(|value| DateTime::from_timestamp(value, 0));
if rate_limit_remaining == Some(0) {
if let Some(reset_at) = rate_limit_reset {
let now = Utc::now();
if reset_at > now {
let sleep_duration = reset_at - now;
log::info!(
"rate limit reached. Sleeping for {} seconds",
sleep_duration.num_seconds()
);
self.executor.sleep(sleep_duration.to_std().unwrap()).await;
}
if rate_limit_remaining == Some(0)
&& let Some(reset_at) = rate_limit_reset
{
let now = Utc::now();
if reset_at > now {
let sleep_duration = reset_at - now;
log::info!(
"rate limit reached. Sleeping for {} seconds",
sleep_duration.num_seconds()
);
self.executor.sleep(sleep_duration.to_std().unwrap()).await;
}
}