run.py
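"""
Generate a step-by-step tutorial with a local LMStudio model.

The script asks the user for a tutorial topic, has the model break the topic
down into numbered tasks, and then streams a detailed explanation for each
task. The full conversation log is saved as a text file in the output folder
and handed to the txt2html helper for conversion to HTML.

Assumes an LMStudio server exposing an OpenAI-compatible API is running at
http://localhost:1234/v1 and that the txt2html module is importable from
this folder.
"""
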
import os  # Standard library: filesystem checks and folder creation
import re  # Standard library: regular expressions, used to extract the tasks

from openai import OpenAI  # OpenAI client, pointed at the local LMStudio server
from txt2html import txt2html  # Custom helper that converts the text log to HTML

# Establishing a connection with the local LMStudio server
client = OpenAI(base_url="http://localhost:1234/v1", api_key="not-needed")

filelog = ""  # Accumulates the full conversation log for this run

# Folder where the conversation log will be written
folder_path = "output"
# Create the folder if it does not already exist
if not os.path.exists(folder_path):
    os.makedirs(folder_path)

# Prompting the user to enter a request for a tutorial
user_input = input("Enter a request for a tutorial: ")
filelog += f"{user_input}\n\n"  # Logging the user input

# Generating a filename from the user input (truncated to 50 characters)
filename = user_input[:50] + ".txt"
print(user_input)

# Asking the model to break the request down into numbered tasks
completion = client.chat.completions.create(
    model="local-model",
    messages=[
        {"role": "system", "content": "Break down the response into concise tasks with clear titles, each starting with Task 1:, Task 2:, and so forth, avoiding excessive detail."},
        {"role": "user", "content": user_input}
    ],
    temperature=0.7,
)

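# Illustrative example of the response shape that the regular expression below
# relies on (actual wording and the number of tasks will vary from run to run):
#
#   **Task 1: Gather your ingredients**
#   A few sentences describing the first task...
#   **Task 2: Prepare your equipment**
#   A few sentences describing the second task...
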
# Getting the response generated by the model
response = completion.choices[0].message.content
print(response)
print("-----------------------------")

# Extracting the individual tasks with a regular expression
step_list = re.findall(r'\*\*Task \d+:.*?(?=\*\*Task \d+:|\Z)', response, re.DOTALL)

# Logging and displaying the extracted task list
for step in step_list:
    filelog += step + "\n"
    print(step + "\n")
filelog += "\n\n\n"

# Generating a detailed response for each extracted task
for step in step_list:
    print()
    print(step + "\n")
    user_input = step

    # Each task gets a fresh conversation: the system prompt plus the task text
    history = [
        {"role": "system", "content": "Brimming with intelligence, you serve as an esteemed assistant, known for imparting invaluable insights and guidance"},
        {"role": "user", "content": user_input},
    ]
    # Streaming the model's answer for the current task
    completion = client.chat.completions.create(
        model="local-model",
        messages=history,
        temperature=0.7,
        stream=True,
    )
    new_message = {"role": "assistant", "content": ""}
    for chunk in completion:
        if chunk.choices[0].delta.content:
            print(chunk.choices[0].delta.content, end="", flush=True)
            new_message["content"] += chunk.choices[0].delta.content
    history.append(new_message)
    filelog += new_message["content"] + '\n\n\n'

filename = os.path.join(folder_path, filename)
# Writing the conversation log to a file
with open(filename, "w", encoding="utf-8") as file:
file.write(filelog)
# Converting the conversation log to HTML
txt2html(filename)
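# Example session (assumes an LMStudio server is serving a model at
# http://localhost:1234/v1 and that the txt2html module is importable):
#
#   $ python run.py
#   Enter a request for a tutorial: How to brew coffee with a French press
#
# The task breakdown and the streamed answer for each task are printed to the
# console and written to output/How to brew coffee with a French press.txt,
# which is then passed to txt2html for HTML conversion.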