author     Tuowen Zhao <ztuowen@gmail.com>    2023-10-19 00:04:44 -0700
committer  Tuowen Zhao <ztuowen@gmail.com>    2023-10-19 00:04:44 -0700
commit     2128f5fe9386dcf2f0597c8035f951c5b60d7562 (patch)
tree       ac3ab65a87bd4971275ae91d7b61176eced13774 /server/continuedev/libs/llm/hugging_face.py
parent     08f38574fa2633bbf709d24e1c79417d4285ba61 (diff)
cleanup server
Diffstat (limited to 'server/continuedev/libs/llm/hugging_face.py')
-rw-r--r--  server/continuedev/libs/llm/hugging_face.py  19
1 file changed, 0 insertions, 19 deletions
diff --git a/server/continuedev/libs/llm/hugging_face.py b/server/continuedev/libs/llm/hugging_face.py
deleted file mode 100644
index c2e934c0..00000000
--- a/server/continuedev/libs/llm/hugging_face.py
+++ /dev/null
@@ -1,19 +0,0 @@
-# TODO: This class is far out of date
-
-from transformers import AutoModelForCausalLM, AutoTokenizer
-
-from .llm import LLM
-
-
-class HuggingFace(LLM):
-    def __init__(self, model_path: str = "Salesforce/codegen-2B-mono"):
-        self.model_path = model_path
-        self.tokenizer = AutoTokenizer.from_pretrained(model_path)
-        self.model = AutoModelForCausalLM.from_pretrained(model_path)
-
-    def complete(self, prompt: str, **kwargs):
-        args = {"max_tokens": 100}
-        args.update(kwargs)
-        input_ids = self.tokenizer(prompt, return_tensors="pt").input_ids
-        generated_ids = self.model.generate(input_ids, max_length=args["max_tokens"])
-        return self.tokenizer.decode(generated_ids[0], skip_special_tokens=True)
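The removed wrapper was already flagged as out of date, and its `complete` method passed `args["max_tokens"]` to `generate` as `max_length`, which caps the total sequence (prompt plus completion) rather than the number of newly generated tokens. Below is a minimal sketch of how the same synchronous completion could be written against the current `transformers` generate API; the class name `HuggingFaceLocal`, the `max_tokens` parameter handling, and the prompt-stripping step are illustrative assumptions, not code from this repository.

```python
# Hedged sketch of a modernized local HuggingFace completion wrapper.
# Names and defaults are illustrative; only the transformers calls shown
# here (from_pretrained, generate, decode) are standard library API.

from transformers import AutoModelForCausalLM, AutoTokenizer


class HuggingFaceLocal:  # hypothetical name, not the deleted class
    def __init__(self, model_path: str = "Salesforce/codegen-2B-mono"):
        self.model_path = model_path
        self.tokenizer = AutoTokenizer.from_pretrained(model_path)
        self.model = AutoModelForCausalLM.from_pretrained(model_path)

    def complete(self, prompt: str, max_tokens: int = 100) -> str:
        # Tokenize the prompt and generate a bounded continuation.
        # max_new_tokens limits only the generated text, unlike max_length,
        # which also counts the prompt tokens.
        input_ids = self.tokenizer(prompt, return_tensors="pt").input_ids
        generated_ids = self.model.generate(input_ids, max_new_tokens=max_tokens)
        # Strip the prompt tokens so only the completion is returned.
        completion_ids = generated_ids[0][input_ids.shape[-1]:]
        return self.tokenizer.decode(completion_ids, skip_special_tokens=True)
```

Usage would be along the lines of `HuggingFaceLocal().complete("def fib(n):")`, with the model weights downloaded on first construction by `from_pretrained`.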