Update app.py
app.py CHANGED
@@ -61,7 +61,7 @@ if btn:
                 theme,
                 topic,
             )
-            content.append(complete_with_gpt(prmpt, 200, "meta-llama/Llama-3.2-
+            content.append(complete_with_gpt(prmpt, 200, "meta-llama/Llama-3.2-3B", 1500, 0.7, 1.5))
         if x == 1:
             prmpt = story_setting_prompt(
                 genre,
@@ -76,7 +76,7 @@ if btn:
             )
             previous = " ".join(x for x in content)
             prmpt = previous + " " + prmpt
-            content.append(complete_with_gpt(prmpt, 200, "meta-llama/Llama-3.2-
+            content.append(complete_with_gpt(prmpt, 200, "meta-llama/Llama-3.2-3B", 1500, 0.7, 1.5))

         if x % 3 == 0:
             prmpt = supporting_character_inclusion(
@@ -87,16 +87,16 @@ if btn:
             )
             previous = " ".join(x for x in content)
             prmpt = previous + " " + prmpt
-            content.append(complete_with_gpt(prmpt, 200, "meta-llama/Llama-3.2-
+            content.append(complete_with_gpt(prmpt, 200, "meta-llama/Llama-3.2-3B", 1500, 0.7, 1.5))
         if x == chapters - 1:
             prmpt = ending_scene(genre, resolution, main_name, positive_messaging)
             previous = " ".join(x for x in content)
             prmpt = previous + " " + prmpt
-            content.append(complete_with_gpt(prmpt, 200, "meta-llama/Llama-3.2-
+            content.append(complete_with_gpt(prmpt, 200, "meta-llama/Llama-3.2-3B", 1500, 0.7, 1.5))
         else:
             previous = " ".join(x for x in content)
             prmpt = previous
-            content.append(complete_with_gpt(prmpt, 200, "meta-llama/Llama-3.2-
+            content.append(complete_with_gpt(prmpt, 200, "meta-llama/Llama-3.2-3B", 1500, 0.7, 1.5))

     st.write(content)
     filenamee = to_pdf(convert(create_md(text=content, title=title)))
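For context on the new call sites: every generation call now pins the model id "meta-llama/Llama-3.2-3B" and passes explicit generation settings. A minimal sketch of what a complete_with_gpt helper with this argument order might look like is shown below; the parameter names, the meaning of the numeric arguments, and the use of huggingface_hub's InferenceClient are assumptions for illustration, not taken from this repository's app.py.

# Hypothetical sketch only; the real complete_with_gpt in app.py may differ.
from huggingface_hub import InferenceClient

def complete_with_gpt(prompt, min_len, model_id, max_new_tokens, temperature, repetition_penalty):
    # Argument order mirrors the call sites in the diff:
    #   complete_with_gpt(prmpt, 200, "meta-llama/Llama-3.2-3B", 1500, 0.7, 1.5)
    # The numeric arguments are interpreted as typical text-generation settings;
    # min_len is accepted but unused in this sketch.
    client = InferenceClient(model=model_id)
    return client.text_generation(
        prompt,
        max_new_tokens=max_new_tokens,
        temperature=temperature,
        repetition_penalty=repetition_penalty,
    )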