Load Meta-Llama-3-8B with Hugging Face transformers and run a quick generation test:

from transformers import AutoTokenizer, AutoModelForCausalLM
import os
import time

os.environ["HF_TOKEN"] = '*******'
# Set the environment variable holding the Hugging Face access token

model = 'meta-llama/Meta-Llama-3-8B'
# Model name on the Hugging Face Hub

tokenizer = AutoTokenizer.from_pretrained(model)
# Load the tokenizer from the pretrained model name

llama = AutoModelForCausalLM.from_pretrained(model, device_map="cuda:1")
# Load the causal language model and place it on the specified GPU

llama.device
# device(type='cuda', index=1)

begin = time.time()

input_text = "Write me a poem about maching learning."

input_ids = tokenizer(input_text, return_tensors="pt").to(llama.device)

outputs = llama.generate(**input_ids)

print(tokenizer.decode(outputs[0]))

end = time.time()
print(end - begin)
'''
<|begin_of_text|>Write me a poem about maching learning. I will use it for a project in my class. You can use whatever words you want. I will use it for a project in my class. You can use whatever words you want.<|end_of_text|>
1.718801736831665
'''
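By default, generate() uses greedy decoding and stops after only a handful of new tokens, which is why the completion above is short and repetitive. A minimal sketch of passing explicit generation settings (max_new_tokens, do_sample, temperature, and top_p are standard transformers generate() arguments; the values below are illustrative assumptions, not tuned settings):

# Sketch: explicit generation settings (values are illustrative assumptions)
outputs = llama.generate(
    **input_ids,
    max_new_tokens=128,   # allow a longer completion than the default
    do_sample=True,       # sample instead of greedy decoding
    temperature=0.7,      # soften the next-token distribution
    top_p=0.9,            # nucleus-sampling cutoff
)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))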
Printing the model shows its architecture:

llama
LlamaForCausalLM(
  (model): LlamaModel(
    (embed_tokens): Embedding(128256, 4096)
    (layers): ModuleList(
      (0-31): 32 x LlamaDecoderLayer(
        (self_attn): LlamaSdpaAttention(
          (q_proj): Linear(in_features=4096, out_features=4096, bias=False)
          (k_proj): Linear(in_features=4096, out_features=1024, bias=False)
          (v_proj): Linear(in_features=4096, out_features=1024, bias=False)
          (o_proj): Linear(in_features=4096, out_features=4096, bias=False)
          (rotary_emb): LlamaRotaryEmbedding()
        )
        (mlp): LlamaMLP(
          (gate_proj): Linear(in_features=4096, out_features=14336, bias=False)
          (up_proj): Linear(in_features=4096, out_features=14336, bias=False)
          (down_proj): Linear(in_features=14336, out_features=4096, bias=False)
          (act_fn): SiLU()
        )
        (input_layernorm): LlamaRMSNorm()
        (post_attention_layernorm): LlamaRMSNorm()
      )
    )
    (norm): LlamaRMSNorm()
  )
  (lm_head): Linear(in_features=4096, out_features=128256, bias=False)
)
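The printout also shows why k_proj and v_proj are narrower (out_features=1024) than q_proj (4096): Llama 3 8B uses grouped-query attention, where 32 query heads of dimension 128 share 8 key/value heads (8 x 128 = 1024). A small sketch reading these numbers from the loaded config (hidden_size, num_attention_heads, and num_key_value_heads are the standard transformers LlamaConfig fields):

# Sketch: derive the attention-head layout from the model config
cfg = llama.config
head_dim = cfg.hidden_size // cfg.num_attention_heads  # 4096 / 32 = 128
print(cfg.num_attention_heads)              # 32 query heads
print(cfg.num_key_value_heads)              # 8 shared key/value heads (GQA)
print(head_dim * cfg.num_key_value_heads)   # 1024, the k_proj/v_proj width above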