Skip to content
This repository was archived by the owner on Jan 2, 2025. It is now read-only.
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
19 changes: 0 additions & 19 deletions server/bleep/src/agent.rs
Original file line number Diff line number Diff line change
Expand Up @@ -45,25 +45,6 @@ pub enum Error {
Processing(anyhow::Error),
}

/// A unified way to track a collection of repositories
#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
pub struct Project(pub Vec<RepoRef>);

impl Project {
    /// This is a temporary thing to keep backwards compatibility.
    /// We should have a UUID here to track this stuff consistently.
    ///
    /// The ID is currently derived from the first repository reference.
    ///
    /// # Panics
    ///
    /// Panics if the project contains no repositories — an empty project is
    /// considered an invalid configuration.
    pub fn id(&self) -> String {
        self.0
            // `.first()` is the idiomatic form of `.get(0)` (clippy: get_first)
            .first()
            .map(ToString::to_string)
            .expect("invalid project configuration")
    }

    /// Iterate over the display names of every repository in this project.
    pub fn repos(&self) -> impl Iterator<Item = String> + '_ {
        self.0.iter().map(|r| r.display_name())
    }
}

pub struct Agent {
pub app: Application,
pub conversation: Conversation,
Expand Down
14 changes: 8 additions & 6 deletions server/bleep/src/indexes/doc.rs
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@ use std::{collections::HashSet, sync::Arc};
#[derive(Clone)]
pub struct Doc {
sql: SqlDb,
#[allow(unused)]
section_index: tantivy::Index,
section_schema: schema::Section,
index_writer: Arc<Mutex<tantivy::IndexWriter>>,
Expand Down Expand Up @@ -363,12 +364,15 @@ impl Doc {
let _ = tx.send(Progress::Err(error.to_string()));

// send job status to error
self.index_queue
if let Some(job) = self
.index_queue
.write()
.await
.iter_mut()
.find(|job| job.id == id)
.map(|job| job.status = STATUS_ERROR);
{
job.status = STATUS_ERROR;
}

// return error
Err(error)?;
Expand Down Expand Up @@ -452,7 +456,7 @@ impl Doc {
// delete old docs from tantivy
//
// create a checkpoint before deletion, so we can revert to here if the job is cancelled
self.index_writer.lock().await.commit();
let _ = self.index_writer.lock().await.commit();
self.index_writer
.lock()
.await
Expand Down Expand Up @@ -588,9 +592,7 @@ impl Doc {
modified_at: record.modified_at,
});

Ok(queued_item
.or(indexed_item)
.ok_or(Error::InvalidDocId(id))?)
queued_item.or(indexed_item).ok_or(Error::InvalidDocId(id))
}

/// Search for doc source by title
Expand Down
2 changes: 1 addition & 1 deletion server/bleep/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
unused_qualifications
)]
#![warn(unused_crate_dependencies)]
#![allow(elided_lifetimes_in_paths)]
#![allow(elided_lifetimes_in_paths, clippy::diverging_sub_expression)]

#[cfg(all(feature = "onnx", feature = "metal"))]
compile_error!("cannot enable `onnx` and `metal` at the same time");
Expand Down
5 changes: 2 additions & 3 deletions server/bleep/src/query/execute.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
use std::{
borrow::Cow,
collections::{HashMap, HashSet},
sync::Arc,
};
Expand Down Expand Up @@ -283,7 +282,7 @@ impl ApiQuery {
for q in queries {
if let Some(r) = q.repo_str() {
// The branch that this project has loaded this repo with.
let project_branch = repo_branches.get(&r).map(Option::as_ref).flatten();
let project_branch = repo_branches.get(&r).and_then(Option::as_ref);

// If the branch doesn't match what we expect, drop the query.
if q.branch_str().as_ref() == project_branch {
Expand All @@ -293,7 +292,7 @@ impl ApiQuery {
for (r, b) in &repo_branches {
out.push(parser::Query {
repo: Some(parser::Literal::from(r)),
branch: b.as_ref().map(|b| parser::Literal::from(b)),
branch: b.as_ref().map(parser::Literal::from),
..q.clone()
});
}
Expand Down
12 changes: 3 additions & 9 deletions server/bleep/src/webserver/answer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -11,23 +11,20 @@ use axum::{
};
use futures::{future::Either, stream, StreamExt};
use reqwest::StatusCode;
use serde_json::json;
use tracing::{debug, error, info, warn};

use super::conversation::ConversationId;

use super::middleware::User;
use crate::{
agent::{
self,
exchange::{CodeChunk, Exchange, FocusedChunk, RepoPath},
Action, Agent, ExchangeState, Project,
Action, Agent, ExchangeState,
},
analytics::{EventData, QueryEvent},
db::QueryLog,
query::parser::{self, Literal},
repo::RepoRef,
webserver::conversation::{self, Conversation},
webserver::conversation::Conversation,
Application,
};

Expand Down Expand Up @@ -76,10 +73,6 @@ pub struct Answer {
pub conversation_id: Option<i64>,
}

/// Serde default for the thread ID: mint a fresh random (v4) UUID when the
/// incoming request does not supply one.
fn default_thread_id() -> uuid::Uuid {
    uuid::Uuid::new_v4()
}

/// Serde default for the answer model: fall back to GPT-4 Turbo (24K context)
/// when the request does not specify a model.
fn default_answer_model() -> agent::model::LLMModel {
    agent::model::GPT_4_TURBO_24K
}
Expand Down Expand Up @@ -166,6 +159,7 @@ struct AgentExecutor {
action: Action,
}

#[allow(clippy::large_enum_variant)]
#[derive(serde::Serialize)]
enum AnswerEvent {
ChatEvent(Exchange),
Expand Down
14 changes: 5 additions & 9 deletions server/bleep/src/webserver/conversation.rs
Original file line number Diff line number Diff line change
@@ -1,20 +1,16 @@
use anyhow::{Context, Result};
use anyhow::Result;
use axum::{
extract::{Path, Query, State},
response::IntoResponse,
extract::{Path, State},
Extension, Json,
};
use chrono::NaiveDateTime;
use reqwest::StatusCode;
use std::{fmt, mem};
use tracing::info;
use std::mem;
use uuid::Uuid;

use crate::{
agent::{exchange::Exchange, Project},
agent::exchange::Exchange,
db::SqlDb,
repo::RepoRef,
webserver::{self, middleware::User, Error, ErrorKind},
webserver::{self, middleware::User, Error},
Application,
};

Expand Down
6 changes: 1 addition & 5 deletions server/bleep/src/webserver/project.rs
Original file line number Diff line number Diff line change
@@ -1,12 +1,8 @@
use std::collections::HashMap;

use crate::{webserver, Application};
use axum::{
extract::{Path, Query},
Extension, Json,
};
use axum::{extract::Path, Extension, Json};
use chrono::NaiveDateTime;
use futures::TryStreamExt;

use super::{middleware::User, repos::Repo, Error};

Expand Down
33 changes: 9 additions & 24 deletions server/bleep/src/webserver/studio.rs
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,6 @@ pub struct Create {

pub async fn create(
app: Extension<Application>,
user: Extension<User>,
Path(project_id): Path<i64>,
Json(params): Json<Create>,
) -> webserver::Result<String> {
Expand Down Expand Up @@ -811,7 +810,6 @@ pub async fn generate(
Ok(Sse::new(Box::pin(stream)))
}

#[allow(clippy::single_range_in_vec_init)]
async fn generate_llm_context(
app: Application,
context: &[ContextFile],
Expand Down Expand Up @@ -1087,7 +1085,7 @@ pub async fn diff(

for chunk in valid_chunks {
let path = chunk.src.as_deref().or(chunk.dst.as_deref()).unwrap();
let (repo, path) = parse_diff_path(&path)?;
let (repo, path) = parse_diff_path(path)?;
let lang = if let Some(l) = file_langs.get(path) {
Some(l.clone())
} else {
Expand Down Expand Up @@ -1149,21 +1147,6 @@ fn parse_diff_path(p: &str) -> Result<(RepoRef, &str)> {
Ok((repo, path))
}

/// Determine the repository (and optional branch) a studio operates on, taken
/// from the first file in its context.
///
/// We make a hard assumption in the design of diffs that a studio can only
/// contain files from one repository. This allows us to determine which
/// repository to create new files or delete files in, without having to prefix
/// file paths with repository names.
///
/// # Errors
///
/// If the context contains no files at all, creating/deleting a file like
/// `index.js` would be ambiguous, so an error is returned instead.
fn context_repo_branch(context: &[ContextFile]) -> Result<(RepoRef, Option<String>)> {
    let first_file = context
        .first()
        .context("could not determine studio repository, studio didn't contain any files")?;

    Ok((first_file.repo.clone(), first_file.branch.clone()))
}

async fn rectify_hunks(
app: &Application,
llm_context: &str,
Expand Down Expand Up @@ -1304,7 +1287,7 @@ pub async fn diff_apply(
.map(|row| row.context)
.ok_or_else(studio_not_found)?;

let context =
let _context =
serde_json::from_str::<Vec<ContextFile>>(&context_json).map_err(Error::internal)?;

let diff_chunks = diff::relaxed_parse(&diff);
Expand All @@ -1329,7 +1312,7 @@ pub async fn diff_apply(
let mut file_content = if chunk.src.is_some() {
app.indexes
.file
.by_path(&repo, &path, None)
.by_path(&repo, path, None)
.await?
.context("path did not exist in the index")?
.content
Expand Down Expand Up @@ -1455,7 +1438,6 @@ pub struct Import {
}

/// Returns a new studio ID, or the `?studio_id=...` query param if present.
#[allow(clippy::single_range_in_vec_init)]
pub async fn import(
app: Extension<Application>,
user: Extension<User>,
Expand Down Expand Up @@ -1709,9 +1691,12 @@ pub async fn list_snapshots(
.and_then(|r| {
let app = (*app).clone();
async move {
let context: Vec<ContextFile> = serde_json::from_str(&r.context).context("failed to deserialize context")?;
let doc_context: Vec<DocContextFile> = serde_json::from_str(&r.doc_context).context("failed to deserialize doc context")?;
let messages: Vec<Message> = serde_json::from_str(&r.messages).context("failed to deserialize messages")?;
let context: Vec<ContextFile> =
serde_json::from_str(&r.context).context("failed to deserialize context")?;
let doc_context: Vec<DocContextFile> = serde_json::from_str(&r.doc_context)
.context("failed to deserialize doc context")?;
let messages: Vec<Message> =
serde_json::from_str(&r.messages).context("failed to deserialize messages")?;

let token_counts = token_counts(app, &messages, &context, &doc_context).await?;

Expand Down