replace_layer_fields and AutoLoraConvert not working as expected
edwin0cheng opened this issue · 1 comment
edwin0cheng commented
The proc-macro does not work as expected.
From running:
cd candle-lora-macro/examples
cargo expand --test linear
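For context, the model being expanded is declared roughly like this (a sketch based on the crate's README; only the `Linear` field named `a` is confirmed by the expansion below, the other field and exact derives are assumptions):

use candle_lora_macro::{replace_layer_fields, AutoLoraConvert};
use candle_nn::Linear;

#[replace_layer_fields]
#[derive(Debug, AutoLoraConvert)]
struct Model {
    a: Linear, // should be collected into the `linear` map by the derive
    b: i32,    // non-layer field, left untouched
}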
Output (only the `Model` part):
impl Model {
/// Be sure to provide a configuration for each type!
pub fn get_lora_model<'a>(
&'a mut self,
lora_config: candle_lora::LoraConfig,
vb: &candle_nn::VarBuilder,
linear_config: Option<candle_lora::LoraLinearConfig>,
conv1d_config: Option<candle_lora::LoraConv1dConfig>,
conv2d_config: Option<candle_lora::LoraConv2dConfig>,
embed_config: Option<candle_lora::LoraEmbeddingConfig>,
) {
let mut linear: ::std::collections::HashMap<
String,
&dyn candle_lora::LinearLayerLike,
> = ::std::collections::HashMap::new();
let mut conv1d: ::std::collections::HashMap<
String,
&dyn candle_lora::Conv1dLayerLike,
> = ::std::collections::HashMap::new();
let mut conv2d: ::std::collections::HashMap<
String,
&dyn candle_lora::Conv2dLayerLike,
> = ::std::collections::HashMap::new();
let mut embed: ::std::collections::HashMap<
String,
&dyn candle_lora::EmbeddingLayerLike,
> = ::std::collections::HashMap::new();
if !linear.is_empty() && linear_config.is_none() {
{
::core::panicking::panic_fmt(
format_args!("Config not speified for linear layers."),
);
};
}
if !conv1d.is_empty() && conv1d_config.is_none() {
{
::core::panicking::panic_fmt(
format_args!("Config not speified for conv1d layers."),
);
};
}
if !conv2d.is_empty() && conv2d_config.is_none() {
{
::core::panicking::panic_fmt(
format_args!("Config not speified for conv2d layers."),
);
};
}
if !embed.is_empty() && embed_config.is_none() {
{
::core::panicking::panic_fmt(
format_args!("Config not speified for embedding layers."),
);
};
}
let mut builder = candle_lora::SelectedLayersBuilder::new();
if linear_config.is_some() {
builder = builder.add_linear_layers(linear, linear_config.unwrap());
}
if conv1d_config.is_some() {
builder = builder.add_conv1d_layers(conv1d, conv1d_config.unwrap());
}
if conv2d_config.is_some() {
builder = builder.add_conv2d_layers(conv2d, conv2d_config.unwrap());
}
if embed_config.is_some() {
builder = builder.add_embed_layers(embed, embed_config.unwrap());
}
let selection = builder.build();
let new_layers = candle_lora::Lora::convert_model(selection, lora_config, &vb);
let _ = "Start";
let _ = "Done!";
}
/// Be sure to provide a configuration for each type!
pub fn get_merged_lora_model<'a>(
&'a mut self,
lora_config: candle_lora::LoraConfig,
vb: &candle_nn::VarBuilder,
linear_config: Option<candle_lora::LoraLinearConfig>,
conv1d_config: Option<candle_lora::LoraConv1dConfig>,
conv2d_config: Option<candle_lora::LoraConv2dConfig>,
embed_config: Option<candle_lora::LoraEmbeddingConfig>,
) {
use candle_lora::Merge;
let mut linear: ::std::collections::HashMap<
String,
&dyn candle_lora::LinearLayerLike,
> = ::std::collections::HashMap::new();
let mut conv1d: ::std::collections::HashMap<
String,
&dyn candle_lora::Conv1dLayerLike,
> = ::std::collections::HashMap::new();
let mut conv2d: ::std::collections::HashMap<
String,
&dyn candle_lora::Conv2dLayerLike,
> = ::std::collections::HashMap::new();
let mut embed: ::std::collections::HashMap<
String,
&dyn candle_lora::EmbeddingLayerLike,
> = ::std::collections::HashMap::new();
if !linear.is_empty() && linear_config.is_none() {
{
::core::panicking::panic_fmt(
format_args!("Config not speified for linear layers."),
);
};
}
if !conv1d.is_empty() && conv1d_config.is_none() {
{
::core::panicking::panic_fmt(
format_args!("Config not speified for conv1d layers."),
);
};
}
if !conv2d.is_empty() && conv2d_config.is_none() {
{
::core::panicking::panic_fmt(
format_args!("Config not speified for conv2d layers."),
);
};
}
if !embed.is_empty() && embed_config.is_none() {
{
::core::panicking::panic_fmt(
format_args!("Config not speified for embedding layers."),
);
};
}
let mut builder = candle_lora::SelectedLayersBuilder::new();
if linear_config.is_some() {
builder = builder.add_linear_layers(linear, linear_config.unwrap());
}
if conv1d_config.is_some() {
builder = builder.add_conv1d_layers(conv1d, conv1d_config.unwrap());
}
if conv2d_config.is_some() {
builder = builder.add_conv2d_layers(conv2d, conv2d_config.unwrap());
}
if embed_config.is_some() {
builder = builder.add_embed_layers(embed, embed_config.unwrap());
}
let selection = builder.build();
let mut new_layers = candle_lora::Lora::convert_model(
selection,
lora_config,
&vb,
);
}
}
Note that these lines are not expanded:
candle-lora/candle-lora-macro/src/lib.rs, lines 547 to 555 (at commit 273eef4).
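Those un-expanded lines are what should populate the per-layer maps from the struct's fields. For the single `Linear` field `a`, I would expect the expansion to contain something along these lines (compare the fixed output in the next comment):

// Expected (but missing) statement in get_lora_model / get_merged_lora_model:
// fill the `linear` map with the model's linear field before building the
// selection, so convert_model actually has layers to swap.
linear.insert("a".to_string(), &*self.a);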
EricLBuehler commented
Thanks for noticing that! I just identified a bug relating to the use of `Box` instead of `Arc` during layer swapping, and here is the result after the fix (a short standalone sketch of the `Box` vs `Arc` distinction follows the expansion below):
pub fn get_lora_model<'a>(
&'a mut self,
lora_config: candle_lora::LoraConfig,
vb: &candle_nn::VarBuilder,
linear_config: Option<candle_lora::LoraLinearConfig>,
conv1d_config: Option<candle_lora::LoraConv1dConfig>,
conv2d_config: Option<candle_lora::LoraConv2dConfig>,
embed_config: Option<candle_lora::LoraEmbeddingConfig>,
) {
let mut linear: ::std::collections::HashMap<
String,
&dyn candle_lora::LinearLayerLike,
> = ::std::collections::HashMap::new();
let mut conv1d: ::std::collections::HashMap<
String,
&dyn candle_lora::Conv1dLayerLike,
> = ::std::collections::HashMap::new();
let mut conv2d: ::std::collections::HashMap<
String,
&dyn candle_lora::Conv2dLayerLike,
> = ::std::collections::HashMap::new();
let mut embed: ::std::collections::HashMap<
String,
&dyn candle_lora::EmbeddingLayerLike,
> = ::std::collections::HashMap::new();
[(linear.insert("a".to_string(), &*self.a))];
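Aside, not part of the expansion above: a minimal standalone sketch of why `Arc` rather than `Box` matters for layer swapping. The trait and types here are illustrative only, not candle-lora's actual API. With `Arc`, a handle to the original layer can be kept alive while the struct field is swapped for the converted layer, which a `Box` field would not allow without first moving the value out.

use std::sync::Arc;

// Stand-in trait; candle-lora's real LinearLayerLike has more methods.
trait LayerLike {
    fn name(&self) -> &str;
}

struct PlainLinear;
impl LayerLike for PlainLinear {
    fn name(&self) -> &str { "plain" }
}

struct LoraLinear;
impl LayerLike for LoraLinear {
    fn name(&self) -> &str { "lora" }
}

struct Model {
    // Arc instead of Box: cloning the handle keeps the original layer
    // accessible while the field itself is replaced.
    a: Arc<dyn LayerLike>,
}

fn main() {
    let mut model = Model { a: Arc::new(PlainLinear) };
    // Keep a cheap handle to the original layer...
    let original = Arc::clone(&model.a);
    // ...then swap the field for the converted (LoRA) layer.
    model.a = Arc::new(LoraLinear);
    assert_eq!(original.name(), "plain");
    assert_eq!(model.a.name(), "lora");
}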