Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

refactor: error handling #36

Draft
wants to merge 4 commits into
base: main
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

7 changes: 5 additions & 2 deletions crates/lsp-ai/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -33,13 +33,16 @@ tokio = { version = "1.36.0", features = ["rt-multi-thread", "time"] }
indexmap = "2.2.5"
async-trait = "0.1.78"
tree-sitter = "0.22"
utils-tree-sitter = { path = "../utils-tree-sitter", features = ["all"], version = "0.1.0" }
utils-tree-sitter = { path = "../utils-tree-sitter", features = [
"all",
], version = "0.1.0" }
splitter-tree-sitter = { path = "../splitter-tree-sitter", version = "0.1.0" }
text-splitter = { version = "0.13.3" }
md5 = "0.7.0"
thiserror = "1"

[build-dependencies]
cc="*"
cc = "*"

[features]
default = []
Expand Down
46 changes: 27 additions & 19 deletions crates/lsp-ai/src/config.rs
Original file line number Diff line number Diff line change
@@ -1,8 +1,21 @@
use anyhow::{Context, Result};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use std::collections::HashMap;

/// Errors produced while parsing and querying the server configuration.
///
/// Derives `std::error::Error` + `Display` via `thiserror`; the `#[error(...)]`
/// attribute on each variant is its user-facing message.
#[derive(thiserror::Error, Debug)]
pub(crate) enum ConfigError {
/// Requested a completion-related setting, but the `completion` section is absent.
#[error("completion is disabled")]
CompletionDisabled,
/// The `completion.model` name (carried in the `String`) has no entry in the `models` map.
#[error("`{0}` model not found in `models` config")]
ModelNotFound(String),
/// No `initializationOptions` were supplied by the client; there is no built-in default.
#[error("lsp-ai does not currently provide a default configuration. Please pass a configuration. See https://github.com/SilasMarvin/lsp-ai for configuration options and examples")]
NoDefaultConfig,
/// The top-level server configuration value was not a JSON object.
#[error("server configuration must be a valid JSON object")]
NotJson,
/// Deserialization of the configuration JSON failed; wraps the underlying
/// `serde_json` error via `#[from]` so `?` converts it automatically.
#[error("serde json error: {0}")]
SerdeJson(#[from] serde_json::Error),
}

pub(crate) type Kwargs = HashMap<String, Value>;

const fn max_requests_per_second_default() -> f32 {
Expand Down Expand Up @@ -315,15 +328,15 @@ pub struct Config {
}

impl Config {
pub fn new(mut args: Value) -> Result<Self> {
pub fn new(mut args: Value) -> Result<Self, ConfigError> {
// Validate that the models specified are there so we can unwrap
let configuration_args = args
.as_object_mut()
.context("Server configuration must be a JSON object")?
.ok_or(ConfigError::NotJson)?
.remove("initializationOptions");
let valid_args = match configuration_args {
Some(configuration_args) => serde_json::from_value(configuration_args)?,
None => anyhow::bail!("lsp-ai does not currently provide a default configuration. Please pass a configuration. See https://github.com/SilasMarvin/lsp-ai for configuration options and examples"),
None => return Err(ConfigError::NoDefaultConfig),
};
let client_params: ValidClientParams = serde_json::from_value(args)?;
Ok(Self {
Expand All @@ -344,24 +357,19 @@ impl Config {
self.config.completion.as_ref().map(|x| &x.post_process)
}

pub fn get_completion_transformer_max_requests_per_second(&self) -> anyhow::Result<f32> {
pub fn get_completion_transformer_max_requests_per_second(&self) -> Result<f32, ConfigError> {
let completion_model = &self
.config
.completion
.as_ref()
.ok_or(ConfigError::CompletionDisabled)?
.model;
match &self
.config
.models
.get(
&self
.config
.completion
.as_ref()
.context("Completions is not enabled")?
.model,
)
.with_context(|| {
format!(
"`{}` model not found in `models` config",
&self.config.completion.as_ref().unwrap().model
)
})? {
.get(completion_model)
.ok_or_else(|| ConfigError::ModelNotFound(completion_model.to_owned()))?
{
#[cfg(feature = "llama_cpp")]
ValidModel::LLaMACPP(llama_cpp) => Ok(llama_cpp.max_requests_per_second),
ValidModel::OpenAI(open_ai) => Ok(open_ai.max_requests_per_second),
Expand Down
10 changes: 8 additions & 2 deletions crates/lsp-ai/src/crawl.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,12 @@ use tracing::{error, instrument};

use crate::config::{self, Config};

/// Errors produced while crawling the workspace.
///
/// `Display` is derived via `thiserror`'s `#[error(...)]` attributes.
#[derive(thiserror::Error, Debug)]
pub(crate) enum CrawlError {
/// The client's `root_uri` (carried in the `String`) does not start with
/// `file://`, so it cannot be mapped to a local path to crawl.
#[error("Skipping crawling as root_uri does not begin with file://: {0}")]
InvalidRootUri(String),
}

pub(crate) struct Crawl {
crawl_config: config::Crawl,
config: Config,
Expand All @@ -26,14 +32,14 @@ impl Crawl {
&mut self,
triggered_file: Option<String>,
mut f: impl FnMut(&config::Crawl, &str) -> anyhow::Result<bool>,
) -> anyhow::Result<()> {
) -> Result<(), CrawlError> {
if self.crawled_all {
return Ok(());
}

if let Some(root_uri) = &self.config.client_params.root_uri {
if !root_uri.starts_with("file://") {
anyhow::bail!("Skipping crawling as root_uri does not begin with file://")
return Err(CrawlError::InvalidRootUri(root_uri.to_owned()));
}

let extension_to_match = triggered_file
Expand Down
13 changes: 13 additions & 0 deletions crates/lsp-ai/src/error.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
use crate::config::ConfigError;

/// Top-level error type aggregating the crate's error sources.
///
/// Each variant uses `#[from]` so `?` converts the underlying error
/// automatically. NOTE(review): `serde_json::Error` is convertible both
/// directly (`Error::SerdeJson`) and via `ConfigError::SerdeJson` ->
/// `Error::Config`; which path is taken depends on the function's return
/// type at the `?` site.
#[derive(thiserror::Error, Debug)]
pub(crate) enum Error {
/// Configuration parsing/lookup failure (see `config::ConfigError`).
#[error("config error: {0}")]
Config(#[from] ConfigError),
/// I/O failure from the standard library.
#[error("io error: {0}")]
Io(#[from] std::io::Error),
/// JSON (de)serialization failure.
#[error("serde json error: {0}")]
SerdeJson(#[from] serde_json::Error),
}

/// Crate-wide `Result` alias defaulting the error type to [`Error`].
pub(crate) type Result<T> = std::result::Result<T, Error>;
4 changes: 2 additions & 2 deletions crates/lsp-ai/src/main.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,3 @@
use anyhow::Result;

use lsp_server::{Connection, ExtractError, Message, Notification, Request, RequestId};
use lsp_types::{
request::Completion, CompletionOptions, DidChangeTextDocumentParams, DidOpenTextDocumentParams,
Expand All @@ -16,6 +14,7 @@ use tracing_subscriber::{EnvFilter, FmtSubscriber};
mod config;
mod crawl;
mod custom_requests;
mod error;
mod memory_backends;
mod memory_worker;
mod splitters;
Expand All @@ -27,6 +26,7 @@ mod utils;

use config::Config;
use custom_requests::generation::Generation;
use error::Result;
use memory_backends::MemoryBackend;
use transformer_backends::TransformerBackend;
use transformer_worker::{CompletionRequest, GenerationRequest, WorkerRequest};
Expand Down
Loading