Mirror of https://github.com/huggingface/peft.git (synced 2025-10-20 23:43:47 +08:00)
Compare commits: patch-rele... → v0.15.2 (1 commit)

Author | SHA1 | Date
---|---|---
 | 3c7b6e7f02 |
setup.py (2 changed lines)
@@ -15,7 +15,7 @@
 from setuptools import find_packages, setup
 
 
-VERSION = "0.15.1"
+VERSION = "0.15.2"
 
 extras = {}
 extras["quality"] = [
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-__version__ = "0.15.1"
+__version__ = "0.15.2"
 
 from .auto import (
     MODEL_TYPE_TO_PEFT_MODEL_MAPPING,
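As a side note, the attribute bumped in this hunk is what the package reports at runtime; a minimal sanity check, assuming the 0.15.2 build is the one installed:

import peft

# After this patch release the attribute changed above reads "0.15.2".
print(peft.__version__)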
@ -953,7 +953,7 @@ class PeftModel(PushToHubMixin, torch.nn.Module):
|
|||||||
else:
|
else:
|
||||||
self.modules_to_save.update(peft_config.modules_to_save)
|
self.modules_to_save.update(peft_config.modules_to_save)
|
||||||
# this may add a new ModulesToSaveWrapper
|
# this may add a new ModulesToSaveWrapper
|
||||||
_set_trainable(self, adapter_name, module_names=peft_config.modules_to_save)
|
_set_trainable(self, adapter_name, module_names=getattr(peft_config, "modules_to_save", None))
|
||||||
|
|
||||||
if getattr(peft_config, "trainable_token_indices", None) is not None:
|
if getattr(peft_config, "trainable_token_indices", None) is not None:
|
||||||
if isinstance(peft_config.trainable_token_indices, dict):
|
if isinstance(peft_config.trainable_token_indices, dict):
|
||||||
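The hunk above replaces a direct attribute access with getattr(..., None): a config object that never defines modules_to_save now yields None at the call site instead of raising AttributeError, and None is handled by the new guard in _set_trainable (last hunk below). A minimal sketch of that difference, using hypothetical stand-in classes rather than real PEFT configs:

from dataclasses import dataclass, field

@dataclass
class ConfigWithModulesToSave:  # hypothetical stand-in for a config that has the attribute
    modules_to_save: list = field(default_factory=lambda: ["classifier"])

class ConfigWithoutModulesToSave:  # hypothetical stand-in for a config that lacks it entirely
    pass

for cfg in (ConfigWithModulesToSave(), ConfigWithoutModulesToSave()):
    # getattr with a default returns None instead of raising AttributeError,
    # which is what the updated call sites pass on as module_names.
    module_names = getattr(cfg, "modules_to_save", None)
    print(type(cfg).__name__, "->", module_names)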
@@ -1497,13 +1497,14 @@ class PeftModelForSequenceClassification(PeftModel):
             else:
                 peft_config.modules_to_save.extend(classifier_module_names)
 
-        for name, _ in self.base_model.named_children():
-            if any(module_name in name for module_name in self.modules_to_save):
-                self.cls_layer_name = name
-                break
+        if self.modules_to_save:
+            for name, _ in self.base_model.named_children():
+                if any(module_name in name for module_name in self.modules_to_save):
+                    self.cls_layer_name = name
+                    break
 
         # to make sure classifier layer is trainable; this may add a new ModulesToSaveWrapper
-        _set_trainable(self, adapter_name, module_names=peft_config.modules_to_save)
+        _set_trainable(self, adapter_name, module_names=getattr(peft_config, "modules_to_save", None))
 
     def add_adapter(self, adapter_name: str, peft_config: PeftConfig, low_cpu_mem_usage: bool = False) -> None:
         """
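The added if self.modules_to_save: guard skips the classifier-layer search when modules_to_save is None or empty; the old loop would have iterated any(...) over None, which raises TypeError. A rough standalone sketch of the guarded lookup, with a toy module standing in for a real base model:

import torch.nn as nn

# Toy stand-in for a base model with a classification head; not a real PEFT model.
toy_base_model = nn.ModuleDict({"encoder": nn.Linear(4, 4), "classifier": nn.Linear(4, 2)})

def find_cls_layer_name(modules_to_save):
    # Mirrors the guarded search: only look for the head when there is something to match.
    if modules_to_save:
        for name, _ in toy_base_model.named_children():
            if any(module_name in name for module_name in modules_to_save):
                return name
    return None

print(find_cls_layer_name({"classifier", "score"}))  # -> "classifier"
print(find_cls_layer_name(None))                     # -> None, no iteration over None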
@@ -2288,13 +2289,14 @@ class PeftModelForTokenClassification(PeftModel):
             else:
                 peft_config.modules_to_save.extend(classifier_module_names)
 
-        for name, _ in self.base_model.named_children():
-            if any(module_name in name for module_name in self.modules_to_save):
-                self.cls_layer_name = name
-                break
+        if self.modules_to_save is not None:
+            for name, _ in self.base_model.named_children():
+                if any(module_name in name for module_name in self.modules_to_save):
+                    self.cls_layer_name = name
+                    break
 
         # to make sure classifier layer is trainable; this may add a new ModulesToSaveWrapper
-        _set_trainable(self, adapter_name, module_names=peft_config.modules_to_save)
+        _set_trainable(self, adapter_name, module_names=getattr(peft_config, "modules_to_save", None))
 
     def add_adapter(self, adapter_name: str, peft_config: PeftConfig, low_cpu_mem_usage: bool = False) -> None:
         """
@@ -2515,7 +2517,7 @@ class PeftModelForQuestionAnswering(PeftModel):
                 break
 
         # to make sure classifier layer is trainable; this may add a new ModulesToSaveWrapper
-        _set_trainable(self, adapter_name, module_names=peft_config.modules_to_save)
+        _set_trainable(self, adapter_name, module_names=getattr(peft_config, "modules_to_save", None))
 
     def add_adapter(self, adapter_name: str, peft_config: PeftConfig, low_cpu_mem_usage: bool = False) -> None:
         """
@@ -749,6 +749,9 @@ def _set_trainable(
     if wrapper_cls is None:
         wrapper_cls = ModulesToSaveWrapper
 
+    if module_names is None:
+        return
+
     trainable_modules = []
     found_modules = set()
     # disable removal of duplicates to support targeting tied weights
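The early return added here turns a missing modules_to_save into a no-op instead of a failure later in the helper. A condensed sketch of how the guard combines with the getattr(..., None) call sites above; this is an illustrative stand-in, not the actual _set_trainable implementation:

def set_trainable_sketch(adapter_name, module_names=None):
    # New guard: nothing was requested, so there is nothing to wrap or mark trainable.
    if module_names is None:
        return

    trainable_modules = [name for name in module_names]
    print(f"[{adapter_name}] would mark as trainable:", trainable_modules)

class BareConfig:  # hypothetical config with no modules_to_save attribute
    pass

# The getattr default feeds None straight into the helper, which now simply returns.
set_trainable_sketch("default", module_names=getattr(BareConfig(), "modules_to_save", None))
set_trainable_sketch("default", module_names=["classifier", "score"])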
(One file's diff was suppressed because it is too large.)