sugiv committed
Commit: d959c89
1 Parent(s): 78af7ce

Leetmonkey In Action. Darn LeetMonkey these days

Files changed (1): app.py (+25 -12)
app.py CHANGED
@@ -56,8 +56,7 @@ generation_kwargs = {
     "temperature": 0.2,
     "top_k": 50,
     "top_p": 0.95,
-    "repeat_penalty": 1.1,
-    "stream": True
+    "repeat_penalty": 1.1
 }

 def generate_solution(instruction, model):
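
This hunk drops `"stream": True` from the shared kwargs: streaming is now requested explicitly at the one call site that needs it (third hunk below). Keeping it in the dict would have collided with that explicit `stream=True` and raised a duplicate-keyword TypeError. For context, a minimal sketch of how llama-cpp-python consumes these settings; the model path and `max_tokens` value are placeholders, not taken from this commit:

```python
from llama_cpp import Llama  # assumes llama-cpp-python is installed

generation_kwargs = {
    "max_tokens": 512,      # placeholder cap, not shown in this hunk
    "temperature": 0.2,     # low temperature: near-deterministic code output
    "top_k": 50,
    "top_p": 0.95,
    "repeat_penalty": 1.1,  # mild penalty against repeated tokens
}

# Placeholder path; app.py resolves the real one from the selected model.
llm = Llama(model_path="model.gguf", n_ctx=2048, n_threads=4, verbose=False)
response = llm("### Instruction: ...", **generation_kwargs)  # blocking call
print(response["choices"][0]["text"])
```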
@@ -75,15 +74,8 @@ Here's the complete Python function implementation:
 ```python
 """

-    generated_text = ""
-    for chunk in model(full_prompt, stream=True, **generation_kwargs):
-        token = chunk["choices"][0]["text"]
-        generated_text += token
-        yield generated_text
-
-    formatted_code = extract_and_format_code(generated_text)
-    yield formatted_code
-
+    response = model(full_prompt, **generation_kwargs)
+    return response["choices"][0]["text"]

 def extract_and_format_code(text):
     # Extract code between triple backticks
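
`generate_solution` is no longer a generator; it makes one blocking call and returns the raw completion. The shape of the result is what changes with `stream` in llama-cpp-python (a sketch for orientation, not code from this commit):

```python
# Non-streaming: the call returns a single completion dict.
result = model(full_prompt, **generation_kwargs)
text = result["choices"][0]["text"]

# Streaming: the call returns an iterator of chunks, each holding only
# the newly generated piece of text, so the caller accumulates them.
for chunk in model(full_prompt, stream=True, **generation_kwargs):
    piece = chunk["choices"][0]["text"]
```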
@@ -144,8 +136,29 @@ def stream_solution(problem, model_name):
     model = Llama(model_path=model_path, n_ctx=2048, n_threads=4, n_gpu_layers=0, verbose=False)

     logger.info(f"Generating solution using {model_name} model")
-    for generated_text in generate_solution(problem, model):
+    system_prompt = "You are a Python coding assistant specialized in solving LeetCode problems. Provide only the complete implementation of the given function. Ensure proper indentation and formatting. Do not include any explanations or multiple solutions."
+    full_prompt = f"""### Instruction:
+{system_prompt}
+
+Implement the following function for the LeetCode problem:
+
+{problem}
+
+### Response:
+Here's the complete Python function implementation:
+
+```python
+"""
+
+    generated_text = ""
+    for chunk in model(full_prompt, stream=True, **generation_kwargs):
+        token = chunk["choices"][0]["text"]
+        generated_text += token
         yield generated_text
+
+    formatted_code = extract_and_format_code(generated_text)
+    logger.info("Solution generated successfully")
+    yield formatted_code

 with gr.Blocks() as demo:
     gr.Markdown("# LeetCode Problem Solver")
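
All of the streaming logic now lives in `stream_solution`: it builds an Alpaca-style prompt that ends by opening a ```python fence (priming the model to emit code immediately), yields the accumulating text on every chunk, and finishes with the cleaned-up code. Gradio runs generator handlers as streaming events, repainting the output on each yield. A hedged sketch of the UI wiring; component names and model choices are illustrative, not from this commit:

```python
import gradio as gr

with gr.Blocks() as demo:
    gr.Markdown("# LeetCode Problem Solver")
    problem_box = gr.Textbox(label="LeetCode problem", lines=8)
    model_choice = gr.Dropdown(["Q4_K_M", "Q8_0"], label="Model")  # illustrative names
    solution_box = gr.Code(language="python", label="Generated solution")

    solve_btn = gr.Button("Solve")
    # Because stream_solution is a generator, Gradio streams each yield
    # into solution_box instead of waiting for the function to return.
    solve_btn.click(stream_solution, inputs=[problem_box, model_choice], outputs=solution_box)

demo.launch()
```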
 
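`extract_and_format_code` appears in the diff only as its first comment line. Because the prompt already opened the ```python fence, the completion usually ends with a closing ``` (or hits the token limit first), so the extractor has to tolerate a missing fence. A plausible sketch under those assumptions; the real body is not shown in this diff:

```python
import re
import textwrap

def extract_and_format_code(text):
    # Take everything up to the first closing fence if present, else all of
    # the text; the opening ```python was part of the prompt, so the model's
    # output may contain only the closing fence or none at all.
    match = re.search(r"(.*?)```", text, re.DOTALL)
    code = match.group(1) if match else text
    return textwrap.dedent(code).strip()
```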