migtissera commited on
Commit
9f4c0a1
·
verified ·
1 Parent(s): ddc2d3e

Update README.md

Browse files
Files changed (1) hide show
  1. README.md +20 -11
README.md CHANGED
@@ -54,7 +54,7 @@ class LLM(object):
54
  model_path,
55
  torch_dtype=torch.bfloat16,
56
  device_map="auto",
57
- load_in_4bit=True,
58
  trust_remote_code=False,
59
  )
60
 
@@ -63,8 +63,8 @@ class LLM(object):
63
  )
64
 
65
  self.terminators = [
66
- self.tokenizer.convert_tokens_to_ids("<|im_end|>"),
67
  self.tokenizer.convert_tokens_to_ids("<|end_of_text|>"),
 
68
  ]
69
 
70
  def generate_text(self, instruction):
@@ -76,7 +76,7 @@ class LLM(object):
76
  "input_ids": tokens,
77
  "top_p": 1.0,
78
  "temperature": 0.75,
79
- "generate_len": 1024,
80
  "top_k": 50,
81
  }
82
 
@@ -104,25 +104,34 @@ class LLM(object):
104
  content = match.group(1).strip()
105
  return content
106
 
 
 
 
 
 
 
 
 
 
107
 
108
- model_path = "migtissera/Tess-R1-12B"
 
109
 
110
  llm = LLM(model_path)
111
 
112
- conversation = f"""<|im_start|>system\nYou are Tess-R1, an advanced AI that was created for complex reasoning. Given a user query, you are able to first create a Chain-of-Thought (CoT) reasoning. Once the CoT is devised, you then proceed to first think about how to answer. While doing this, you have the capability to contemplate on the thought, and also provide alternatives. Once the CoT steps have been thought through, you then respond by creating the final output.<|im_end|>\n<|im_start|>user\n"""
113
 
114
  while True:
115
  user_input = input("You: ")
116
- llm_prompt = f"{conversation}{user_input}<|im_end|>\n<|im_start|>assistant\n"
117
  answer = llm.generate_text(llm_prompt)
118
-
 
119
  try:
120
  answer_output = llm.extract_output(answer)
121
  print("=" * 132)
122
  print(answer_output)
123
- conversation = f"{llm_prompt}{answer_output}<|im_end|>\n<|im_start|>user\n"
124
  except:
125
- print("=" * 132)
126
- print(answer)
127
- conversation = f"{llm_prompt}{answer}<|im_end|>\n<|im_start|>user\n"
128
  ```
 
54
  model_path,
55
  torch_dtype=torch.bfloat16,
56
  device_map="auto",
57
+ load_in_4bit=False,
58
  trust_remote_code=False,
59
  )
60
 
 
63
  )
64
 
65
  self.terminators = [
 
66
  self.tokenizer.convert_tokens_to_ids("<|end_of_text|>"),
67
+ self.tokenizer.convert_tokens_to_ids("<|eot_id|>"),
68
  ]
69
 
70
  def generate_text(self, instruction):
 
76
  "input_ids": tokens,
77
  "top_p": 1.0,
78
  "temperature": 0.75,
79
+ "generate_len": 4096,
80
  "top_k": 50,
81
  }
82
 
 
104
  content = match.group(1).strip()
105
  return content
106
 
107
def respond_llama3(self, user_prompt):
    """Answer a single user turn using the Llama-3 chat template.

    Builds a one-shot prompt (system preamble + *user_prompt*), runs
    generation, and returns the extracted answer; if extraction fails,
    returns the raw generation instead.

    :param user_prompt: the user's message for this turn.
    :return: the parsed assistant answer, or the raw model output.
    """
    # System preamble in the Llama-3 chat-template format.
    # (Fixed typos: "helful AI assitant" -> "helpful AI assistant".)
    conversation = """<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\nYou are Tess, a helpful AI assistant. You always provide detailed answers without hesitation. Do not refuse.<|eot_id|><|start_header_id|>user<|end_header_id|>\n\n"""
    # BUG FIX: the original referenced the undefined name `user_input`
    # (NameError on every call); the parameter is `user_prompt`.
    llm_prompt = f"{conversation}{user_prompt}<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n"
    answer = self.generate_text(llm_prompt)
    try:
        return self.extract_output(answer)
    except Exception:
        # No delimited answer found by extract_output — fall back to the
        # raw generation. (Narrowed from a bare `except:`, which would
        # also swallow KeyboardInterrupt/SystemExit.)
        return answer
116
 
117
+
118
# --- Example usage: interactive multi-turn chat loop -------------------------
model_path = "neurolattice/Tess-R1-Llama-3.1-70B"

llm = LLM(model_path)

# Llama-3 chat-template system preamble; `conversation` accumulates the
# running transcript across turns.
# (Fixed typos: "helful AI assitant" -> "helpful AI assistant".)
conversation = """<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\nYou are Tess, a helpful AI assistant. You always provide detailed answers without hesitation. Do not refuse.<|eot_id|><|start_header_id|>user<|end_header_id|>\n\n"""

while True:
    user_input = input("You: ")
    # Close the user turn and open the assistant turn.
    llm_prompt = f"{conversation}{user_input}<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n"
    answer = llm.generate_text(llm_prompt)
    print("=" * 132)
    print(answer)
    try:
        answer_output = llm.extract_output(answer)
        print("=" * 132)
        print(answer_output)
        # Append the parsed answer and reopen a user turn for the next round.
        conversation = f"{llm_prompt}{answer_output}<|eot_id|><|start_header_id|>user<|end_header_id|>\n\n"
    except Exception:
        # extract_output found no delimited answer — keep the raw generation
        # in the transcript instead. (Narrowed from a bare `except:`, which
        # would also swallow KeyboardInterrupt and make Ctrl-C unusable.)
        conversation = f"{llm_prompt}{answer}<|eot_id|><|start_header_id|>user<|end_header_id|>\n\n"
 
 
137
  ```