Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
19 commits
Select commit Hold shift + click to select a range
ede5c46
ci(lint): add clippy profile for string safety lints
amitksingh1490 Apr 14, 2026
3ae1f49
Merge branch 'main' into clippy-warning-unsafe-char-boundary
amitksingh1490 Apr 14, 2026
756553d
[autofix.ci] apply automated fixes
autofix-ci[bot] Apr 14, 2026
e483b7f
ci(lint): conditionally include --all-targets based on clippy profile
amitksingh1490 Apr 14, 2026
d031c6e
[autofix.ci] apply automated fixes
autofix-ci[bot] Apr 14, 2026
df005ce
refactor(ci): streamline clippy commands and enhance linting workflow
amitksingh1490 Apr 14, 2026
10634da
refactor(lint): simplify clippy command structure and ensure all targets
amitksingh1490 Apr 14, 2026
988d1fe
[autofix.ci] apply automated fixes
autofix-ci[bot] Apr 14, 2026
8e5b45b
fix(ci): adjust clippy string safety command to disable additional ch…
amitksingh1490 Apr 14, 2026
fabf65f
fix: resolve clippy warnings for unsafe char boundary access
amitksingh1490 Apr 14, 2026
5d2cc75
[autofix.ci] apply automated fixes
autofix-ci[bot] Apr 14, 2026
95c28f7
Merge branch 'main' into clippy-warning-unsafe-char-boundary
amitksingh1490 Apr 14, 2026
1ea264f
test: add tests for Cyrillic text handling in wrap_pasted_text function
amitksingh1490 Apr 14, 2026
cdefe9a
[autofix.ci] apply automated fixes
autofix-ci[bot] Apr 14, 2026
a2a97ed
feat: implement UIError enum for improved error handling in UI operations
amitksingh1490 Apr 14, 2026
4e77ff3
[autofix.ci] apply automated fixes
autofix-ci[bot] Apr 14, 2026
5319837
fix: improve header retrieval in Porcelain implementation to handle e…
amitksingh1490 Apr 14, 2026
a3f6571
fix(compact): add error logging for out-of-bounds compaction range
amitksingh1490 Apr 14, 2026
c8e626f
Merge branch 'main' into clippy-warning-unsafe-char-boundary
amitksingh1490 Apr 14, 2026
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 3 additions & 1 deletion .github/workflows/autofix.yml
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,9 @@ jobs:
- name: Cargo Fmt
run: cargo +nightly fmt --all
- name: Cargo Clippy
run: cargo +nightly clippy --all-features --all-targets --workspace --fix --allow-dirty -- -D warnings
run: cargo +nightly clippy --all-features --workspace --all-targets --fix --allow-dirty -- -D warnings
- name: Cargo Clippy String Safety
run: cargo +nightly clippy --all-features --workspace -- -D clippy::string_slice -D clippy::indexing_slicing
- name: Autofix
uses: autofix-ci/action@7a166d7532b277f34e16238930461bf77f9d7ed8
concurrency:
Expand Down
36 changes: 26 additions & 10 deletions crates/forge_app/src/compact.rs
Original file line number Diff line number Diff line change
Expand Up @@ -65,11 +65,25 @@ impl Compactor {

// The sequence from the original message that needs to be compacted
// Filter out droppable messages (e.g., attachments) from compaction
let compaction_sequence = context.messages[start..=end]
.iter()
.filter(|msg| !msg.is_droppable())
.cloned()
.collect::<Vec<_>>();
let compaction_sequence = context
.messages
.get(start..=end)
.map(|slice| {
slice
.iter()
.filter(|msg| !msg.is_droppable())
.cloned()
.collect::<Vec<_>>()
})
.unwrap_or_else(|| {
tracing::error!(
"Compaction range [{}..={}] out of bounds for {} messages",
start,
end,
context.messages.len()
);
Vec::new()
});

// Create a temporary context for the sequence to generate summary
let sequence_context = Context::default().messages(compaction_sequence.clone());
Expand Down Expand Up @@ -121,11 +135,13 @@ impl Compactor {

// Accumulate usage from all messages in the compaction range before they are
// destroyed
let compacted_usage = context.messages[start..=end]
.iter()
.filter_map(|entry| entry.usage.as_ref())
.cloned()
.reduce(|a, b| a.accumulate(&b));
let compacted_usage = context.messages.get(start..=end).and_then(|slice| {
slice
.iter()
.filter_map(|entry| entry.usage.as_ref())
.cloned()
.reduce(|a, b| a.accumulate(&b))
});

// Replace the range with the summary, transferring the accumulated usage
let mut summary_entry = MessageEntry::from(ContextMessage::user(summary, None));
Expand Down
4 changes: 2 additions & 2 deletions crates/forge_app/src/dto/anthropic/request.rs
Original file line number Diff line number Diff line change
Expand Up @@ -308,9 +308,9 @@ impl Message {
| Content::ToolResult { .. } => Some(idx),
_ => None,
})
&& let Some(content) = self.content.get_mut(last_cacheable_idx)
{
self.content[last_cacheable_idx] =
std::mem::take(&mut self.content[last_cacheable_idx]).cached(true);
*content = std::mem::take(content).cached(true);
}

self
Expand Down
27 changes: 24 additions & 3 deletions crates/forge_ci/src/jobs/lint.rs
Original file line number Diff line number Diff line change
Expand Up @@ -10,14 +10,13 @@ fn fmt_base() -> Vec<&'static str> {
vec!["cargo", "+nightly", "fmt", "--all"]
}

/// Base parts for clippy commands
/// Base parts for clippy commands (shared across all clippy invocations).
fn clippy_base() -> Vec<&'static str> {
vec![
"cargo",
"+nightly",
"clippy",
"--all-features",
"--all-targets",
"--workspace",
]
}
Expand All @@ -31,9 +30,10 @@ pub fn fmt_cmd(fix: bool) -> String {
cargo_cmd(&parts)
}

/// Build a cargo clippy command
/// Build a cargo clippy command that checks all targets for general warnings.
pub fn clippy_cmd(fix: bool) -> String {
let mut parts = clippy_base();
parts.push("--all-targets");

if fix {
parts.extend(["--fix", "--allow-dirty"]);
Expand All @@ -43,3 +43,24 @@ pub fn clippy_cmd(fix: bool) -> String {

cargo_cmd(&parts)
}

/// Build a cargo clippy command that denies UTF-8 string-slicing and
/// raw-indexing lints (`clippy::string_slice`, `clippy::indexing_slicing`).
///
/// Deliberately omits `--all-targets` so test code is not checked.
pub fn clippy_string_safety_cmd(fix: bool) -> String {
    let mut args = clippy_base();

    if fix {
        args.push("--fix");
        args.push("--allow-dirty");
    }

    // Lint flags must come after `--` so cargo forwards them to clippy itself.
    for flag in [
        "--",
        "-D",
        "clippy::string_slice",
        "-D",
        "clippy::indexing_slicing",
    ] {
        args.push(flag);
    }

    cargo_cmd(&args)
}
3 changes: 3 additions & 0 deletions crates/forge_ci/src/workflows/autofix.rs
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,9 @@ pub fn generate_autofix_workflow() {
)
.add_step(Step::new("Cargo Fmt").run(jobs::fmt_cmd(true)))
.add_step(Step::new("Cargo Clippy").run(jobs::clippy_cmd(true)))
.add_step(
Step::new("Cargo Clippy String Safety").run(jobs::clippy_string_safety_cmd(false)),
)
.add_step(Step::new("Autofix").uses(
"autofix-ci",
"action",
Expand Down
4 changes: 3 additions & 1 deletion crates/forge_display/src/code.rs
Original file line number Diff line number Diff line change
Expand Up @@ -63,7 +63,9 @@ impl SyntaxHighlighter {
} else {
EmbeddedThemeName::InspiredGithub
};
let theme = &self.theme_set.themes[theme_name.as_name()];
let Some(theme) = self.theme_set.themes.get(theme_name.as_name()) else {
return code.to_string();
};
let mut hl = HighlightLines::new(syntax, theme);

code.lines()
Expand Down
23 changes: 11 additions & 12 deletions crates/forge_display/src/grep.rs
Original file line number Diff line number Diff line change
Expand Up @@ -39,20 +39,17 @@ impl<'a> ParsedLine<'a> {
return None;
}

let path = parts.first()?.trim();
let line_num = parts.get(1)?.trim();
let content = parts.get(2)?.trim();

// Validate that path and line number parts are not empty
// and that line number contains only digits
if parts[0].is_empty()
|| parts[1].is_empty()
|| !parts[1].chars().all(|c| c.is_ascii_digit())
{
if path.is_empty() || line_num.is_empty() || !line_num.chars().all(|c| c.is_ascii_digit()) {
return None;
}

Some(Self {
path: parts[0].trim(),
line_num: parts[1].trim(),
content: parts[2].trim(),
})
Some(Self { path, line_num, content })
}
}

Expand Down Expand Up @@ -90,9 +87,11 @@ impl GrepFormat {
|mat| {
format!(
"{}{}{}",
&content[..mat.start()],
style(&content[mat.start()..mat.end()]).yellow().bold(),
&content[mat.end()..]
content.get(..mat.start()).unwrap_or(""),
style(content.get(mat.start()..mat.end()).unwrap_or(""))
.yellow()
.bold(),
content.get(mat.end()..).unwrap_or("")
)
},
),
Expand Down
4 changes: 2 additions & 2 deletions crates/forge_domain/src/attachment.rs
Original file line number Diff line number Diff line change
Expand Up @@ -91,15 +91,15 @@ impl Attachment {
// Find the next "@[" pattern
if let Some(start_pos) = remaining.find("@[") {
// Move to the position where "@[" starts
remaining = &remaining[start_pos..];
remaining = remaining.get(start_pos..).unwrap_or("");
match FileTag::parse(remaining) {
Ok((next_remaining, file_tag)) => {
tags.push(file_tag);
remaining = next_remaining;
}
Err(_e) => {
// Skip the "@[" since we couldn't parse it
remaining = &remaining[2..];
remaining = remaining.get(2..).unwrap_or("");
}
}
} else {
Expand Down
4 changes: 3 additions & 1 deletion crates/forge_domain/src/merge.rs
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,9 @@ pub mod vec {
for other_agent in other {
if let Some(&index) = base_map.get(other_agent.key()) {
// If the base contains an agent with the same Key, merge them
base[index].merge(other_agent);
if let Some(base_agent) = base.get_mut(index) {
base_agent.merge(other_agent);
}
} else {
// Otherwise, append the other agent to the base list
base.push(other_agent);
Expand Down
2 changes: 1 addition & 1 deletion crates/forge_domain/src/xml.rs
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ pub fn extract_tag_content<'a>(text: &'a str, tag_name: &str) -> Option<&'a str>
if let Some(end_idx) = text.rfind(&closing_tag) {
let content_start = start_idx + opening_tag.len();
if content_start < end_idx {
return Some(text[content_start..end_idx].trim());
return text.get(content_start..end_idx).map(|s| s.trim());
}
}
}
Expand Down
5 changes: 4 additions & 1 deletion crates/forge_fs/src/read_range.rs
Original file line number Diff line number Diff line change
Expand Up @@ -90,7 +90,10 @@ impl crate::ForgeFS {
let result_content = if start_pos == 0 && end_pos == total_lines - 1 {
content.to_string() // Return full content if requesting entire file
} else {
lines[start_pos as usize..=end_pos as usize].join("\n")
lines
.get(start_pos as usize..=end_pos as usize)
.map(|slice| slice.join("\n"))
.unwrap_or_default()
};

Ok((result_content, info))
Expand Down
5 changes: 4 additions & 1 deletion crates/forge_infra/src/auth/http/anthropic.rs
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,10 @@ impl OAuthHttpProvider for AnthropicHttpProvider {
// Anthropic-specific token exchange
let (code, state) = if code.contains('#') {
let parts: Vec<&str> = code.split('#').collect();
(parts[0].to_string(), parts.get(1).map(|s| s.to_string()))
(
parts.first().map(|s| s.to_string()).unwrap_or_default(),
parts.get(1).map(|s| s.to_string()),
)
} else {
(code.to_string(), None)
};
Expand Down
21 changes: 13 additions & 8 deletions crates/forge_infra/src/auth/strategy.rs
Original file line number Diff line number Diff line change
Expand Up @@ -69,23 +69,28 @@ fn extract_chatgpt_account_id(token: &str) -> Option<String> {
}
use base64::Engine;
let payload = base64::engine::general_purpose::URL_SAFE_NO_PAD
.decode(parts[1])
.decode(parts.get(1)?)
.ok()?;
let claims: serde_json::Value = serde_json::from_slice(&payload).ok()?;

// Try chatgpt_account_id first
if let Some(id) = claims["chatgpt_account_id"].as_str() {
if let Some(id) = claims.get("chatgpt_account_id").and_then(|v| v.as_str()) {
return Some(id.to_string());
}
// Try nested auth claim
if let Some(id) = claims["https://api.openai.com/auth"]["chatgpt_account_id"].as_str() {
if let Some(id) = claims
.get("https://api.openai.com/auth")
.and_then(|v| v.get("chatgpt_account_id"))
.and_then(|v| v.as_str())
{
return Some(id.to_string());
}
// Fall back to organizations[0].id
if let Some(id) = claims["organizations"]
.as_array()
if let Some(id) = claims
.get("organizations")
.and_then(|v| v.as_array())
.and_then(|orgs| orgs.first())
.and_then(|org| org["id"].as_str())
.and_then(|org| org.get("id").and_then(|v| v.as_str()))
{
return Some(id.to_string());
}
Expand Down Expand Up @@ -762,7 +767,7 @@ async fn poll_for_tokens(
.unwrap_or_else(|_| serde_json::json!({"error": "parse_error"}));

// Check for error field first
if let Some(error) = token_response["error"].as_str() {
if let Some(error) = token_response.get("error").and_then(|v| v.as_str()) {
if handle_oauth_error(error).is_ok() {
// Retryable error - continue polling
continue;
Expand All @@ -784,7 +789,7 @@ async fn poll_for_tokens(
let error_response: serde_json::Value = serde_json::from_str(&body_text)
.unwrap_or_else(|_| serde_json::json!({"error": "unknown_error"}));

if let Some(error) = error_response["error"].as_str() {
if let Some(error) = error_response.get("error").and_then(|v| v.as_str()) {
if handle_oauth_error(error).is_ok() {
// Retryable error - sleep and continue
tokio::time::sleep(if error == "slow_down" {
Expand Down
4 changes: 2 additions & 2 deletions crates/forge_infra/src/executor.rs
Original file line number Diff line number Diff line change
Expand Up @@ -195,10 +195,10 @@ async fn stream<A: AsyncReadExt + Unpin, W: Write>(
if n == 0 {
break;
}
writer.write_all(&buff[..n])?;
writer.write_all(buff.get(..n).unwrap_or(&[]))?;
// note: flush is necessary else we get the cursor could not be found error.
writer.flush()?;
output.extend_from_slice(&buff[..n]);
output.extend_from_slice(buff.get(..n).unwrap_or(&[]));
}
}
Ok(output)
Expand Down
20 changes: 11 additions & 9 deletions crates/forge_infra/src/mcp_client.rs
Original file line number Diff line number Diff line change
Expand Up @@ -415,15 +415,13 @@ impl ForgeMcpClient {
// Read the HTTP request
let mut buf = vec![0u8; 4096];
let n = stream.read(&mut buf).await?;
let request = String::from_utf8_lossy(&buf[..n]);

// Parse the request line to extract query parameters
let request = String::from_utf8_lossy(buf.get(..n).unwrap_or(&[]));
let first_line = request.lines().next().unwrap_or("");
let path = first_line.split_whitespace().nth(1).unwrap_or("/");

// Parse query parameters
let query_start = path.find('?').unwrap_or(path.len());
let query_string = &path[query_start..];
let query_string = path.get(query_start..).unwrap_or("");
let params: std::collections::HashMap<String, String> =
url::form_urlencoded::parse(query_string.trim_start_matches('?').as_bytes())
.into_owned()
Expand Down Expand Up @@ -647,14 +645,18 @@ pub async fn mcp_auth(server_url: &str, env: &Environment) -> anyhow::Result<()>
use tokio::io::{AsyncReadExt, AsyncWriteExt};
let mut buf = vec![0u8; 4096];
let n = stream.read(&mut buf).await?;
let request = String::from_utf8_lossy(&buf[..n]);
let request = String::from_utf8_lossy(buf.get(..n).unwrap_or(&[]));
let first_line = request.lines().next().unwrap_or("");
let path = first_line.split_whitespace().nth(1).unwrap_or("/");
let query_start = path.find('?').unwrap_or(path.len());
let params: std::collections::HashMap<String, String> =
url::form_urlencoded::parse(path[query_start..].trim_start_matches('?').as_bytes())
.into_owned()
.collect();
let params: std::collections::HashMap<String, String> = url::form_urlencoded::parse(
path.get(query_start..)
.unwrap_or("")
.trim_start_matches('?')
.as_bytes(),
)
.into_owned()
.collect();

let code = params
.get("code")
Expand Down
Loading
Loading