from transformers import AutoModel, AutoTokenizer
import sentencepiece as spm  # not used below; only needed if you inspect the SentencePiece model directly
from sentencepiece import sentencepiece_model_pb2 as sp_pb2_model


def main():
    # Load the tokenizer and model from the current directory;
    # trust_remote_code=True is required because the checkpoint ships custom model code.
    tokenizer = AutoTokenizer.from_pretrained('./', trust_remote_code=True)
    model = AutoModel.from_pretrained('./', trust_remote_code=True)
    # print(f"model: {model}")
    model = model.eval()

    # "你好" means "Hello"; max_new_tokens keeps the generated reply short.
    response, history = model.chat(tokenizer, "你好", history=[], max_new_tokens=16)
    print(response)


if __name__ == "__main__":
    main()
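As written, the script runs the model on CPU in full precision. A minimal sketch of the same call with device placement is below; it assumes the checkpoint in `./` exposes the custom `chat` interface used above and tolerates half precision on GPU, which may not hold for every model.

import torch
from transformers import AutoModel, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained('./', trust_remote_code=True)
model = AutoModel.from_pretrained('./', trust_remote_code=True)

# Use half precision on GPU when available; stay in full precision on CPU.
if torch.cuda.is_available():
    model = model.half().cuda()
model = model.eval()

response, _ = model.chat(tokenizer, "你好", history=[], max_new_tokens=16)
print(response)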