#! /Library/Frameworks/Python.framework/Versions/3.12/bin/python3

# describe.py
# To fulfill the test of asking the LLMs to describe the provided texts.

# Import modules.
import json

import ollama

# Add source files.
IMPORTED = {
    'Strings': 'data/datasets/strings.JSON',
    'Prompts': 'tests/config/prompts.json',
    'Models': 'tests/config/models.JSON',
}
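
# For reference, the rough shape of each config file, inferred from how this
# script indexes them (the concrete keys and model IDs below are illustrative
# assumptions, not the real file contents):
#   - models.JSON:  {"<display name>": "<Ollama model ID>", ...}
#   - prompts.json: {"sample": "<preamble text>", "description": "<final request>", ...}
#   - strings.JSON: {"training": {"<generation type>": ["<text>", ...], ...}, ...}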

# Set up the main variables.
RESULTS = {}

# Read the files, replacing each path in IMPORTED with that file's contents.
for NAME, PATH in list(IMPORTED.items()):
    with open(PATH) as FILE:
        TEXT = FILE.read()

    try:
        IMPORTED[NAME] = json.loads(TEXT)
    except json.JSONDecodeError:
        # Fall back to the raw text if the file is not valid JSON.
        IMPORTED[NAME] = TEXT

# Download the models.
def download_models():
    """Pull every configured model so it is available to Ollama locally."""
    for MODEL_ID in IMPORTED['Models'].values():
        ollama.pull(MODEL_ID)
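
# Note: ollama.pull and ollama.generate talk to a locally running Ollama server
# (http://localhost:11434 by default), so Ollama must be installed and running
# before this script is executed.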

def format_prompt():
    """Assemble the prompt: the sample preamble, every training text, then the description request."""
    PROMPT = f"{IMPORTED['Prompts']['sample']}\n"

    for GENERATION_TYPE, TEXTS in IMPORTED['Strings']['training'].items():
        for TEXT_NUMBER, TEXT in enumerate(TEXTS, start=1):
            BODY = '\n\n\t'.join(TEXT.strip().split('\n\n'))
            PROMPT = f"{PROMPT}\n\n{GENERATION_TYPE}-written #{TEXT_NUMBER}: \n“{BODY}”"

    PROMPT = f"{PROMPT}\n\n{IMPORTED['Prompts']['description']}"

    return PROMPT
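
# For reference, the assembled prompt looks roughly like this (the actual
# wording comes from prompts.json and strings.JSON, which are not shown here):
#
#   <sample prompt>
#
#   <generation type>-written #1:
#   “<first text of that type, with its paragraphs indented>”
#
#   ... one block per training text ...
#
#   <description prompt>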

# Query each configured model with the prompt and collect the responses.
def ask_AI(PROMPT):
    for MODEL_NAME, MODEL_ID in IMPORTED['Models'].items():
        RESULTS[MODEL_NAME] = ollama.generate(model=MODEL_ID, prompt=PROMPT)['response'].strip()

    return RESULTS

# Save all of the output strings.
# Parameters:
#   dictionary: The results to write out.
#   filename: The name of the JSON file to write.
def save_data(dictionary, filename):
    if filename.strip():
        with open(filename, 'w') as file:
            json.dump(dictionary, file)

# Execute the script.
download_models()
PROMPT = format_prompt()

INDENTED_PROMPT = '\n\t|\t'.join(PROMPT.split('\n'))
print(f"Using prompt: \n\t|\t{INDENTED_PROMPT}")

RESULTS = ask_AI(PROMPT)
save_data(dictionary=RESULTS, filename='tests/outputs/descriptions.JSON')
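
# After a successful run, tests/outputs/descriptions.JSON holds one entry per
# configured model name, mapping to that model's description of the texts.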