import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, TextStreamer

tokenizer = AutoTokenizer.from_pretrained("stabilityai/stable-code-instruct-3b", trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(
    "stabilityai/stable-code-instruct-3b",
    torch_dtype=torch.bfloat16,
    trust_remote_code=True,
)
model.eval()
model = model.cuda()
messages = [ { "role": "system", "content": "You are a helpful and polite assistant", }, { "role": "user", "content": "show me a demo code to connect to aws lambda using golang" }, ]