-
Notifications
You must be signed in to change notification settings - Fork 295
[AWQ] use match_modules_set and fix logic #2070
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Open
HDCharles
wants to merge
5
commits into
main
Choose a base branch
from
96_awq_match_module_set
base: main
Could not load branches
Branch not found: {{ refName }}
Loading
Could not load tags
Nothing to show
Loading
Are you sure you want to change the base?
Some commits from the old base branch may be removed from the timeline,
and old review comments may become outdated.
Open
Changes from 2 commits
Commits
Show all changes
5 commits
Select commit
Hold shift + click to select a range
File filter
Filter by extension
Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
There are no files selected for viewing
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -7,6 +7,7 @@ | |
| from compressed_tensors.utils import ( | ||
| align_modules, | ||
| get_execution_device, | ||
| match_modules_set, | ||
| match_named_modules, | ||
| update_offload_parameter, | ||
| ) | ||
|
|
@@ -312,68 +313,76 @@ def _set_resolved_mappings(self, model: Module) -> None: | |
| into ResolvedMapping objects, resolving regular expressions. | ||
| Result is stored in _resolved_mappings. | ||
|
|
||
| For each activation in the mapping list, we find the corresponding weight to | ||
| balance by searching for the longest substring. For instance, if our balance | ||
| weight is "re:.*q_proj" and the activation is "re:.*self_attn_layer_norm" we | ||
| would match model.layer.0.q_proj to model.layer.0.self_attn_layer_norm and | ||
| repeat for model.layer.1 and so on | ||
| Uses match_modules_set to find coherent sets of (smooth_layer, *balance_layers) | ||
| that belong together in the model architecture. | ||
| """ | ||
| # Build a module-to-name mapping for efficient lookups | ||
| module_to_name = {module: name for name, module in model.named_modules()} | ||
|
|
||
| resolved_mappings: list[ResolvedMapping] = [] | ||
| for mapping_idx, mapping in enumerate(self.mappings): | ||
| num_skipped_mappings = 0 | ||
|
|
||
| for smooth_name, smooth_layer in ( | ||
| pbar := tqdm( | ||
| match_named_modules(model, [mapping.smooth_layer], self.ignore) | ||
| ) | ||
| # Use match_modules_set to find coherent sets of modules | ||
| target_patterns = (mapping.smooth_layer, *mapping.balance_layers) | ||
|
|
||
| for modules_set in ( | ||
HDCharles marked this conversation as resolved.
Outdated
Show resolved
Hide resolved
|
||
| pbar := tqdm(match_modules_set(model, target_patterns, self.ignore)) | ||
HDCharles marked this conversation as resolved.
Outdated
Show resolved
Hide resolved
|
||
| ): | ||
| pbar.set_description( | ||
| f"Resolving mapping {mapping_idx+1}/{len(self.mappings)}" | ||
| f" ({num_skipped_mappings} skipped)" | ||
| ) | ||
HDCharles marked this conversation as resolved.
Outdated
Show resolved
Hide resolved
|
||
|
|
||
| smooth_parent_name = ".".join(smooth_name.split(".")[:-1]) | ||
| smooth_parent = get_layer_by_name(smooth_parent_name, model) | ||
| # Unpack the matched set: first is smooth_layer, rest are balance_layers | ||
| smooth_layer = modules_set[0] | ||
| all_balance_layers = list(modules_set[1:]) | ||
|
|
||
| balance_layers, balance_names = [], [] | ||
| for balance_regex in mapping.balance_layers: | ||
| # find the submodules that match the activation layer | ||
| for balance_suffix, balance_layer in match_named_modules( | ||
| smooth_parent, [balance_regex], self.ignore | ||
| ): | ||
| balance_name = f"{smooth_parent_name}.{balance_suffix}" | ||
|
|
||
| # exclude v_proj->o_proj mappings whose shapes are incompatible | ||
| # https://github.com/mit-han-lab/llm-awq/pull/67#issuecomment-1681632777 | ||
| if ( | ||
| isinstance(smooth_layer, torch.nn.Linear) | ||
| and isinstance(balance_layer, torch.nn.Linear) | ||
| and balance_name.endswith(".o_proj") | ||
| and ( | ||
| ( | ||
| smooth_name.endswith(".v_proj") | ||
| and smooth_layer.out_features | ||
| != balance_layer.in_features | ||
| ) | ||
| or ( | ||
| smooth_name.endswith(".qkv_proj") | ||
| and smooth_layer.out_features | ||
| != 3 * balance_layer.in_features | ||
| ) | ||
| # Get names using the pre-built mapping | ||
| smooth_name = module_to_name.get(smooth_layer) | ||
| if smooth_name is None: | ||
HDCharles marked this conversation as resolved.
Outdated
Show resolved
Hide resolved
|
||
| continue | ||
|
|
||
| # Filter balance layers, skipping incompatible ones | ||
| balance_layers = [] | ||
| balance_names = [] | ||
|
|
||
| for balance_layer in all_balance_layers: | ||
| balance_name = module_to_name.get(balance_layer) | ||
| if balance_name is None: | ||
| continue | ||
|
|
||
| # exclude v_proj->o_proj mappings whose shapes are incompatible | ||
| # https://github.com/mit-han-lab/llm-awq/pull/67#issuecomment-1681632777 | ||
| if ( | ||
| isinstance(smooth_layer, torch.nn.Linear) | ||
| and isinstance(balance_layer, torch.nn.Linear) | ||
| and balance_name.endswith(".o_proj") | ||
| and ( | ||
| ( | ||
| smooth_name.endswith(".v_proj") | ||
| and smooth_layer.out_features | ||
| != balance_layer.in_features | ||
| ) | ||
| or ( | ||
| smooth_name.endswith(".qkv_proj") | ||
| and smooth_layer.out_features | ||
| != 3 * balance_layer.in_features | ||
| ) | ||
| ): | ||
| num_skipped_mappings += 1 | ||
| continue | ||
| ) | ||
| ): | ||
| num_skipped_mappings += 1 | ||
| continue | ||
|
||
|
|
||
| balance_layers.append(balance_layer) | ||
| balance_names.append(balance_name) | ||
| balance_layers.append(balance_layer) | ||
| balance_names.append(balance_name) | ||
|
|
||
| if len(balance_layers) == 0: | ||
| continue | ||
|
|
||
| elif len(balance_layers) == 1: | ||
| if len(balance_layers) == 1: | ||
| # for single balance layer, parent is the balance layer | ||
| parent_name, parent = balance_name, balance_layer | ||
| parent_name, parent = balance_names[0], balance_layers[0] | ||
HDCharles marked this conversation as resolved.
Outdated
Show resolved
Hide resolved
|
||
| else: | ||
| # for multiple balance layers, find lowest common parent | ||
| parent_name, parent = get_lowest_common_parent(balance_names, model) | ||
|
|
||
Oops, something went wrong.
Add this suggestion to a batch that can be applied as a single commit.
This suggestion is invalid because no changes were made to the code.
Suggestions cannot be applied while the pull request is closed.
Suggestions cannot be applied while viewing a subset of changes.
Only one suggestion per line can be applied in a batch.
Add this suggestion to a batch that can be applied as a single commit.
Applying suggestions on deleted lines is not supported.
You must change the existing code in this line in order to create a valid suggestion.
Outdated suggestions cannot be applied.
This suggestion has been applied or marked resolved.
Suggestions cannot be applied from pending reviews.
Suggestions cannot be applied on multi-line comments.
Suggestions cannot be applied while the pull request is queued to merge.
Suggestion cannot be applied right now. Please check back later.
Uh oh!
There was an error while loading. Please reload this page.