from collections.abc import Callable
from typing import Optional, Union

import torch
from torch import nn

from ...activations import ACT2FN
from ...cache_utils import Cache, DynamicCache
from ...generation import GenerationMixin
from ...integrations import use_kernel_forward_from_hub
from ...masking_utils import create_causal_mask
from ...modeling_layers import (
    GenericForQuestionAnswering,
    GenericForSequenceClassification,
    GenericForTokenClassification,
    GradientCheckpointingLayer,
)
from ...modeling_outputs import BaseModelOutputWithPast, CausalLMOutputWithPast
from ...modeling_rope_utils import ROPE_INIT_FUNCTIONS, dynamic_rope_update
from ...modeling_utils import ALL_ATTENTION_FUNCTIONS, PreTrainedModel
from ...processing_utils import Unpack
from ...utils import TransformersKwargs, auto_docstring, can_return_tuple, logging
from ...utils.generic import check_model_inputs
from .configuration_llama import LlamaConfig


logger = logging.get_logger(__name__)


@use_kernel_forward_from_hub("RMSNorm")
class LlamaRMSNorm(nn.Module):
    def __init__(self, hidden_size, eps: float = 1e-6) -> None:
        """
        LlamaRMSNorm is equivalent to T5LayerNorm
        """
        super().__init__()
        self.weight = nn.Parameter(torch.ones(hidden_size))
        self.variance_epsilon = eps

    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
        input_dtype = hidden_states.dtype
        hidden_states = hidden_states.to(torch.float32)
        variance = hidden_states.pow(2).mean(-1, keepdim=True)
        hidden_states = hidden_states * torch.rsqrt(variance + self.variance_epsilon)
        return self.weight * hidden_states.to(input_dtype)

    def extra_repr(self):
        return f"{tuple(self.weight.shape)}, eps={self.variance_epsilon}"


class LlamaRotaryEmbedding(nn.Module):
    inv_freq: torch.Tensor  # fix linting for `register_buffer`

    def __init__(self, config: LlamaConfig, device=None):
        super().__init__()
        self.max_seq_len_cached = config.max_position_embeddings
        self.original_max_seq_len = config.max_position_embeddings
        self.config = config
        self.rope_type = self.config.rope_parameters["rope_type"]

        rope_init_fn = self.compute_default_rope_parameters
        if self.rope_type != "default":
            rope_init_fn = ROPE_INIT_FUNCTIONS[self.rope_type]
        inv_freq, self.attention_scaling = rope_init_fn(self.config, device)

        self.register_buffer("inv_freq", inv_freq, persistent=False)
        self.original_inv_freq = self.inv_freq

    @staticmethod
    def compute_default_rope_parameters(
        config: LlamaConfig,
        device: Optional["torch.device"] = None,
        seq_len: Optional[int] = None,
    ) -> tuple["torch.Tensor", float]:
        """
        Computes the inverse frequencies according to the original RoPE implementation
        Args:
            config ([`~transformers.PreTrainedConfig`]):
                The model configuration.
            device (`torch.device`):
                The device to use for initialization of the inverse frequencies.
            seq_len (`int`, *optional*):
                The current sequence length. Unused for this type of RoPE.
        Returns:
            Tuple of (`torch.Tensor`, `float`), containing the inverse frequencies for the RoPE embeddings and the
            post-processing scaling factor applied to the computed cos/sin (unused in this type of RoPE).
        """
        base = config.rope_parameters["rope_theta"]
        dim = getattr(config, "head_dim", None) or config.hidden_size // config.num_attention_heads

        attention_factor = 1.0  # Unused in this type of RoPE

        # Compute the inverse frequencies
        inv_freq = 1.0 / (
            base ** (torch.arange(0, dim, 2, dtype=torch.int64).to(device=device, dtype=torch.float) / dim)
        )
        return inv_freq, attention_factor

    @torch.no_grad()
    @dynamic_rope_update  # power user: used with advanced RoPE types (e.g. dynamic rope)
    def forward(self, x, position_ids):
        inv_freq_expanded = self.inv_freq[None, :, None].float().expand(position_ids.shape[0], -1, 1).to(x.device)
        position_ids_expanded = position_ids[:, None, :].float()

        device_type = x.device.type if isinstance(x.device.type, str) and x.device.type != "mps" else "cpu"
        with torch.autocast(device_type=device_type, enabled=False):  # Force float32
            freqs = (inv_freq_expanded.float() @ position_ids_expanded.float()).transpose(1, 2)
            emb = torch.cat((freqs, freqs), dim=-1)
            cos = emb.cos() * self.attention_scaling
            sin = emb.sin() * self.attention_scaling

        return cos.to(dtype=x.dtype), sin.to(dtype=x.dtype)
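

# Illustrative note (not in the upstream source): given hidden states of shape
# (batch, seq_len, hidden_size) and `position_ids` of shape (batch, seq_len),
# `LlamaRotaryEmbedding.forward` returns a `(cos, sin)` pair, each of shape
# (batch, seq_len, head_dim) and already multiplied by `attention_scaling`.
# `LlamaModel` computes this pair once per forward pass and hands it to every decoder
# layer as `position_embeddings`, so the frequencies are never recomputed per layer.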


def rotate_half(x):
    """Rotates half the hidden dims of the input."""
    x1 = x[..., : x.shape[-1] // 2]
    x2 = x[..., x.shape[-1] // 2 :]
    return torch.cat((-x2, x1), dim=-1)


def apply_rotary_pos_emb(q, k, cos, sin, unsqueeze_dim=1):
    """Applies Rotary Position Embedding to the query and key tensors.

    Args:
        q (`torch.Tensor`): The query tensor.
        k (`torch.Tensor`): The key tensor.
        cos (`torch.Tensor`): The cosine part of the rotary embedding.
        sin (`torch.Tensor`): The sine part of the rotary embedding.
        unsqueeze_dim (`int`, *optional*, defaults to 1):
            The 'unsqueeze_dim' argument specifies the dimension along which to unsqueeze cos[position_ids] and
            sin[position_ids] so that they can be properly broadcasted to the dimensions of q and k. For example, note
            that cos[position_ids] and sin[position_ids] have the shape [batch_size, seq_len, head_dim]. Then, if q and
            k have the shape [batch_size, heads, seq_len, head_dim], then setting unsqueeze_dim=1 makes
            cos[position_ids] and sin[position_ids] broadcastable to the shapes of q and k. Similarly, if q and k have
            the shape [batch_size, seq_len, heads, head_dim], then set unsqueeze_dim=2.
    Returns:
        `tuple(torch.Tensor)` comprising of the query and key tensors rotated using the Rotary Position Embedding.
    """
    cos = cos.unsqueeze(unsqueeze_dim)
    sin = sin.unsqueeze(unsqueeze_dim)
    q_embed = (q * cos) + (rotate_half(q) * sin)
    k_embed = (k * cos) + (rotate_half(k) * sin)
    return q_embed, k_embed
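

# Illustrative usage (not in the upstream source) of the two helpers above, assuming the
# tensor layout produced by `LlamaAttention` below:
#
#     cos, sin = rotary_emb(hidden_states, position_ids)   # each (batch, seq_len, head_dim)
#     q, k = apply_rotary_pos_emb(q, k, cos, sin)           # shapes unchanged,
#                                                           # q/k are (batch, n_heads, seq_len, head_dim)
#
# `rotate_half` pairs dimension i with dimension i + head_dim // 2, so the rotation acts on
# (first half, second half) pairs rather than on interleaved (2i, 2i + 1) pairs; it is not
# interchangeable with interleaved RoPE implementations without permuting the q/k projection
# weights accordingly.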


class LlamaMLP(nn.Module):
    def __init__(self, config):
        super().__init__()
        self.config = config
        self.hidden_size = config.hidden_size
        self.intermediate_size = config.intermediate_size
        self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias)
        self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias)
        self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=config.mlp_bias)
        self.act_fn = ACT2FN[config.hidden_act]

    def forward(self, x):
        down_proj = self.down_proj(self.act_fn(self.gate_proj(x)) * self.up_proj(x))
        return down_proj


def repeat_kv(hidden_states: torch.Tensor, n_rep: int) -> torch.Tensor:
    """
    This is the equivalent of torch.repeat_interleave(x, dim=1, repeats=n_rep). The hidden states go from (batch,
    num_key_value_heads, seqlen, head_dim) to (batch, num_attention_heads, seqlen, head_dim)
    """
    batch, num_key_value_heads, slen, head_dim = hidden_states.shape
    if n_rep == 1:
        return hidden_states
    hidden_states = hidden_states[:, :, None, :, :].expand(batch, num_key_value_heads, n_rep, slen, head_dim)
    return hidden_states.reshape(batch, num_key_value_heads * n_rep, slen, head_dim)


def eager_attention_forward(
    module: nn.Module,
    query: torch.Tensor,
    key: torch.Tensor,
    value: torch.Tensor,
    attention_mask: Optional[torch.Tensor],
    scaling: float,
    dropout: float = 0.0,
    **kwargs: Unpack[TransformersKwargs],
):
    key_states = repeat_kv(key, module.num_key_value_groups)
    value_states = repeat_kv(value, module.num_key_value_groups)

    attn_weights = torch.matmul(query, key_states.transpose(2, 3)) * scaling
    if attention_mask is not None:
        causal_mask = attention_mask[:, :, :, : key_states.shape[-2]]
        attn_weights = attn_weights + causal_mask

    attn_weights = nn.functional.softmax(attn_weights, dim=-1, dtype=torch.float32).to(query.dtype)
    attn_weights = nn.functional.dropout(attn_weights, p=dropout, training=module.training)
    attn_output = torch.matmul(attn_weights, value_states)
    attn_output = attn_output.transpose(1, 2).contiguous()

    return attn_output, attn_weights
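

# Illustrative note (not in the upstream source): `eager_attention_forward` is the plain
# PyTorch fallback. `repeat_kv` first expands the key/value heads so that grouped-query
# attention (num_key_value_heads < num_attention_heads) can share the same matmul path,
# the additive `attention_mask` already carries large negative values at masked positions,
# and the softmax is computed in float32 before casting back to the query dtype. Optimized
# backends (sdpa, flash-attention, flex) are selected instead through
# `ALL_ATTENTION_FUNCTIONS` in `LlamaAttention.forward` below.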


class LlamaAttention(nn.Module):
    """Multi-headed attention from 'Attention Is All You Need' paper"""

    def __init__(self, config: LlamaConfig, layer_idx: int):
        super().__init__()
        self.config = config
        self.layer_idx = layer_idx
        self.head_dim = getattr(config, "head_dim", config.hidden_size // config.num_attention_heads)
        self.num_key_value_groups = config.num_attention_heads // config.num_key_value_heads
        self.scaling = self.head_dim**-0.5
        self.attention_dropout = config.attention_dropout
        self.is_causal = True

        self.q_proj = nn.Linear(
            config.hidden_size, config.num_attention_heads * self.head_dim, bias=config.attention_bias
        )
        self.k_proj = nn.Linear(
            config.hidden_size, config.num_key_value_heads * self.head_dim, bias=config.attention_bias
        )
        self.v_proj = nn.Linear(
            config.hidden_size, config.num_key_value_heads * self.head_dim, bias=config.attention_bias
        )
        self.o_proj = nn.Linear(
            config.num_attention_heads * self.head_dim, config.hidden_size, bias=config.attention_bias
        )

    def forward(
        self,
        hidden_states: torch.Tensor,
        position_embeddings: tuple[torch.Tensor, torch.Tensor],
        attention_mask: Optional[torch.Tensor],
        past_key_values: Optional[Cache] = None,
        cache_position: Optional[torch.LongTensor] = None,
        **kwargs: Unpack[TransformersKwargs],
    ) -> tuple[torch.Tensor, torch.Tensor]:
        input_shape = hidden_states.shape[:-1]
        hidden_shape = (*input_shape, -1, self.head_dim)

        query_states = self.q_proj(hidden_states).view(hidden_shape).transpose(1, 2)
        key_states = self.k_proj(hidden_states).view(hidden_shape).transpose(1, 2)
        value_states = self.v_proj(hidden_states).view(hidden_shape).transpose(1, 2)

        cos, sin = position_embeddings
        query_states, key_states = apply_rotary_pos_emb(query_states, key_states, cos, sin)

        if past_key_values is not None:
            # sin and cos are specific to RoPE models; cache_position needed for the static cache
            cache_kwargs = {"sin": sin, "cos": cos, "cache_position": cache_position}
            key_states, value_states = past_key_values.update(key_states, value_states, self.layer_idx, cache_kwargs)

        attention_interface: Callable = eager_attention_forward
        if self.config._attn_implementation != "eager":
            attention_interface = ALL_ATTENTION_FUNCTIONS[self.config._attn_implementation]

        attn_output, attn_weights = attention_interface(
            self,
            query_states,
            key_states,
            value_states,
            attention_mask,
            dropout=0.0 if not self.training else self.attention_dropout,
            scaling=self.scaling,
            **kwargs,
        )

        attn_output = attn_output.reshape(*input_shape, -1).contiguous()
        attn_output = self.o_proj(attn_output)
        return attn_output, attn_weights


class LlamaDecoderLayer(GradientCheckpointingLayer):
    def __init__(self, config: LlamaConfig, layer_idx: int):
        super().__init__()
        self.hidden_size = config.hidden_size
        self.self_attn = LlamaAttention(config=config, layer_idx=layer_idx)
        self.mlp = LlamaMLP(config)
        self.input_layernorm = LlamaRMSNorm(config.hidden_size, eps=config.rms_norm_eps)
        self.post_attention_layernorm = LlamaRMSNorm(config.hidden_size, eps=config.rms_norm_eps)

    def forward(
        self,
        hidden_states: torch.Tensor,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Cache] = None,
        use_cache: Optional[bool] = False,
        cache_position: Optional[torch.LongTensor] = None,
        position_embeddings: Optional[tuple[torch.Tensor, torch.Tensor]] = None,
        **kwargs: Unpack[TransformersKwargs],
    ) -> torch.Tensor:
        residual = hidden_states
        hidden_states = self.input_layernorm(hidden_states)

        # Self Attention
        hidden_states, _ = self.self_attn(
            hidden_states=hidden_states,
            attention_mask=attention_mask,
            position_ids=position_ids,
            past_key_values=past_key_values,
            use_cache=use_cache,
            cache_position=cache_position,
            position_embeddings=position_embeddings,
            **kwargs,
        )
        hidden_states = residual + hidden_states

        # Fully Connected
        residual = hidden_states
        hidden_states = self.post_attention_layernorm(hidden_states)
        hidden_states = self.mlp(hidden_states)
        hidden_states = residual + hidden_states
        return hidden_states


@auto_docstring
class LlamaPreTrainedModel(PreTrainedModel):
    config: LlamaConfig
    base_model_prefix = "model"
    supports_gradient_checkpointing = True
    _no_split_modules = ["LlamaDecoderLayer"]
    _skip_keys_device_placement = ["past_key_values"]
    _supports_flash_attn = True
    _supports_sdpa = True
    _supports_flex_attn = True
    _can_compile_fullgraph = True
    _supports_attention_backend = True
    _can_record_outputs = {
        "hidden_states": LlamaDecoderLayer,
        "attentions": LlamaAttention,
    }


@auto_docstring
class LlamaModel(LlamaPreTrainedModel):
    def __init__(self, config: LlamaConfig):
        super().__init__(config)
        self.padding_idx = config.pad_token_id
        self.vocab_size = config.vocab_size

        self.embed_tokens = nn.Embedding(config.vocab_size, config.hidden_size, self.padding_idx)
        self.layers = nn.ModuleList(
            [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)]
        )
        self.norm = LlamaRMSNorm(config.hidden_size, eps=config.rms_norm_eps)
        self.rotary_emb = LlamaRotaryEmbedding(config=config)
        self.gradient_checkpointing = False

        # Initialize weights and apply final processing
        self.post_init()

    @check_model_inputs
    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Cache] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        cache_position: Optional[torch.LongTensor] = None,
        use_cache: Optional[bool] = None,
        **kwargs: Unpack[TransformersKwargs],
    ) -> BaseModelOutputWithPast:
        if (input_ids is None) ^ (inputs_embeds is not None):
            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")

        if inputs_embeds is None:
            inputs_embeds = self.embed_tokens(input_ids)

        if use_cache and past_key_values is None:
            past_key_values = DynamicCache(config=self.config)

        if cache_position is None:
            past_seen_tokens = past_key_values.get_seq_length() if past_key_values is not None else 0
            cache_position = torch.arange(
                past_seen_tokens, past_seen_tokens + inputs_embeds.shape[1], device=inputs_embeds.device
            )

        if position_ids is None:
            position_ids = cache_position.unsqueeze(0)

        causal_mask = create_causal_mask(
            config=self.config,
            input_embeds=inputs_embeds,
            attention_mask=attention_mask,
            cache_position=cache_position,
            past_key_values=past_key_values,
            position_ids=position_ids,
        )

        hidden_states = inputs_embeds
        # Create the (cos, sin) position embeddings once and share them across all decoder layers
        position_embeddings = self.rotary_emb(hidden_states, position_ids)

        for decoder_layer in self.layers[: self.config.num_hidden_layers]:
            hidden_states = decoder_layer(
                hidden_states,
                attention_mask=causal_mask,
                position_ids=position_ids,
                past_key_values=past_key_values,
                use_cache=use_cache,
                cache_position=cache_position,
                position_embeddings=position_embeddings,
                **kwargs,
            )

        hidden_states = self.norm(hidden_states)
        return BaseModelOutputWithPast(
            last_hidden_state=hidden_states,
            past_key_values=past_key_values,
        )
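

# Minimal usage sketch (not in the upstream source; the values below are illustrative only):
#
#     config = LlamaConfig(hidden_size=64, intermediate_size=128, num_hidden_layers=2,
#                          num_attention_heads=4, num_key_value_heads=2, vocab_size=128)
#     model = LlamaModel(config)
#     out = model(input_ids=torch.randint(0, config.vocab_size, (1, 8)))
#     out.last_hidden_state.shape   # -> (1, 8, config.hidden_size)
#     out.past_key_values           # DynamicCache populated during the forward pass
#
# The causal-LM head below adds the `lm_head` projection (optionally tied to the input
# embeddings) and the loss computation on top of this bare model.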


@auto_docstring
class LlamaForCausalLM(LlamaPreTrainedModel, GenerationMixin):
    _tied_weights_keys = {"lm_head.weight": "model.embed_tokens.weight"}
    _tp_plan = {"lm_head": "colwise_rep"}
    _pp_plan = {"lm_head": (["hidden_states"], ["logits"])}

    def __init__(self, config):
        super().__init__(config)
        self.model = LlamaModel(config)
        self.vocab_size = config.vocab_size
        self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False)

        # Initialize weights and apply final processing
        self.post_init()

    @can_return_tuple
    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Cache] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        labels: Optional[torch.LongTensor] = None,
        use_cache: Optional[bool] = None,
        cache_position: Optional[torch.LongTensor] = None,
        logits_to_keep: Union[int, torch.Tensor] = 0,
        **kwargs: Unpack[TransformersKwargs],
    ) -> CausalLMOutputWithPast:
        r"""
        Example:

        ```python
        >>> from transformers import AutoTokenizer, LlamaForCausalLM

        >>> model = LlamaForCausalLM.from_pretrained("meta-llama/Llama-2-7b-hf")
        >>> tokenizer = AutoTokenizer.from_pretrained("meta-llama/Llama-2-7b-hf")

        >>> prompt = "Hey, are you conscious? Can you talk to me?"
        >>> inputs = tokenizer(prompt, return_tensors="pt")

        >>> # Generate
        >>> generate_ids = model.generate(inputs.input_ids, max_length=30)
        >>> tokenizer.batch_decode(generate_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0]
        "Hey, are you conscious? Can you talk to me?\nI'm not conscious, but I can talk to you."
        ```"""
        outputs: BaseModelOutputWithPast = self.model(
            input_ids=input_ids,
            attention_mask=attention_mask,
            position_ids=position_ids,
            past_key_values=past_key_values,
            inputs_embeds=inputs_embeds,
            use_cache=use_cache,
            cache_position=cache_position,
            **kwargs,
        )

        hidden_states = outputs.last_hidden_state
        # Only compute necessary logits, and do not upcast them to float if we are not computing the loss
        slice_indices = slice(-logits_to_keep, None) if isinstance(logits_to_keep, int) else logits_to_keep
        logits = self.lm_head(hidden_states[:, slice_indices, :])

        loss = None
        if labels is not None:
            loss = self.loss_function(logits=logits, labels=labels, vocab_size=self.config.vocab_size, **kwargs)

        return CausalLMOutputWithPast(
            loss=loss,
            logits=logits,
            past_key_values=outputs.past_key_values,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
        )


class LlamaForSequenceClassification(GenericForSequenceClassification, LlamaPreTrainedModel):
    pass


class LlamaForQuestionAnswering(GenericForQuestionAnswering, LlamaPreTrainedModel):
    base_model_prefix = "transformer"  # For BC, where `transformer` was used instead of `model`


class LlamaForTokenClassification(GenericForTokenClassification, LlamaPreTrainedModel):
    pass


__all__ = [
    "LlamaForCausalLM",
    "LlamaModel",
    "LlamaPreTrainedModel",
    "LlamaForSequenceClassification",
    "LlamaForQuestionAnswering",
    "LlamaForTokenClassification",
]