|
import json, os |
|
from together import Together |
|
|
|
def generate_article_from_query(query, config_file='config.json', model="meta-llama/Llama-3-8b-chat-hf"):
    """
    Generate a short article for the given query using the Together API.

    The API key is resolved from the TOGETHER_AI environment variable first;
    if that is unset, the key is read from the ``TOGETHER_AI`` field of the
    JSON file at *config_file*.

    Parameters:
    - query (str): The input query for generating the article.
    - config_file (str): Path to a JSON file containing the API key under the
      "TOGETHER_AI" key. Used only when the environment variable is not set.
      Default is 'config.json'.
    - model (str): The Together AI model to use. Default is
      "meta-llama/Llama-3-8b-chat-hf".

    Returns:
    - str: The generated article content.

    Raises:
    - ValueError: If no API key can be found in the environment or the
      config file.
    """

    # Primary source: environment variable (matches original behavior).
    together_ai_key = os.getenv("TOGETHER_AI")

    # Fallback: the documented config file, previously accepted but ignored.
    if not together_ai_key and os.path.exists(config_file):
        with open(config_file, "r", encoding="utf-8") as fh:
            together_ai_key = json.load(fh).get("TOGETHER_AI")

    if not together_ai_key:
        raise ValueError(
            "TOGETHER_AI API key not found. Set the TOGETHER_AI environment "
            f"variable or provide it in '{config_file}' before running the script."
        )

    client = Together(api_key=together_ai_key)

    prompt = f"""Using the query provided, generate a well-researched and informative short article. The article should be detailed, accurate, and structured to cover various aspects of the topic in an engaging way. Focus on presenting key facts, historical context, notable insights, and any relevant background information that adds value to the reader’s understanding. Ensure the tone is neutral and informative. Keep the article short. Here’s the query:

Query: {query}"""

    # Single-turn chat completion; the article is the first choice's content.
    response = client.chat.completions.create(
        model=model,
        messages=[{"role": "user", "content": prompt}],
    )

    return response.choices[0].message.content
|
|
|
|
|
|
|
|
|
|
|
|
|
|