diff --git a/Groqqle.py b/Groqqle.py index c9e3840..5a391f4 100644 --- a/Groqqle.py +++ b/Groqqle.py @@ -83,6 +83,15 @@ def get_groq_api_key(api_key_arg: str = None) -> str: return api_key +def update_search_type(): + if st.session_state.search_type == 'News': + st.session_state.previous_temperature = st.session_state.temperature + st.session_state.temperature = 0 + else: + st.session_state.temperature = st.session_state.previous_temperature + st.session_state.search_results = None # Clear previous results when switching search type + + def update_sidebar(models): with st.sidebar: st.title("Settings") @@ -117,13 +126,32 @@ def update_sidebar(models): ) # Temperature slider - st.session_state.temperature = st.slider( - "Temperature", - min_value=0.0, - max_value=1.0, - value=st.session_state.temperature, - step=0.01 - ) + if 'previous_temperature' not in st.session_state: + st.session_state.previous_temperature = 0.0 + + is_news_search = st.session_state.get('search_type', 'Web') == 'News' + + if is_news_search: + st.session_state.temperature = 0 + st.slider( + "Temperature", + min_value=0.0, + max_value=1.0, + value=0.0, + step=0.01, + disabled=True, + key="temp_slider" + ) + else: + st.session_state.temperature = st.slider( + "Temperature", + min_value=0.0, + max_value=1.0, + value=st.session_state.previous_temperature, + step=0.01, + key="temp_slider" + ) + st.session_state.previous_temperature = st.session_state.temperature # Comprehension Grade slider grade_labels = [ @@ -134,13 +162,9 @@ def update_sidebar(models): selected_grade = st.selectbox( "Comprehension Grade", options=grade_labels, - index=st.session_state.comprehension_grade - 1 # Adjust index from grade level + index=st.session_state.comprehension_grade - 1 ) - log_debug(f"Selected comprehension grade: {selected_grade}") - selected_grade_index = grade_labels.index(selected_grade) + 1 - log_debug(f"Selected comprehension grade index: {selected_grade_index}") - st.session_state.comprehension_grade = 
selected_grade_index - log_debug(f"Updated comprehension grade in session state: {st.session_state.comprehension_grade}") + st.session_state.comprehension_grade = grade_labels.index(selected_grade) + 1 def main(api_key_arg: str = None, num_results: int = 10, max_tokens: int = 4096, default_summary_length: int = 300): st.set_page_config(page_title="Groqqle", layout="centered", initial_sidebar_state="collapsed") @@ -151,9 +175,9 @@ def main(api_key_arg: str = None, num_results: int = 10, max_tokens: int = 4096, if 'summary_length' not in st.session_state: st.session_state.summary_length = default_summary_length if 'selected_model' not in st.session_state: - st.session_state.selected_model = "mixtral-8x7b-32768" + st.session_state.selected_model = "llama3-8b-8192" if 'temperature' not in st.session_state: - st.session_state.temperature = 0.5 + st.session_state.temperature = 0.0 if 'comprehension_grade' not in st.session_state: st.session_state.comprehension_grade = 8 if 'context_window' not in st.session_state: @@ -248,10 +272,7 @@ def main(api_key_arg: str = None, num_results: int = 10, max_tokens: int = 4096, if st.button("Groqqle Search", key="search_button"): perform_search() with col2: - search_type = st.radio("Search Type", ["Web", "News"], index=0, key="search_type") - if search_type != st.session_state.search_type: - st.session_state.search_type = search_type - st.session_state.search_results = None # Clear previous results when switching search type + search_type = st.radio("Search Type", ["Web", "News"], index=0, key="search_type", on_change=update_search_type) with col4: json_results = st.checkbox("JSON Results", value=False, key="json_results") @@ -271,7 +292,7 @@ def perform_search(): summary_length = st.session_state.summary_length selected_model = st.session_state.selected_model context_window = st.session_state.context_window - temperature = st.session_state.temperature + temperature = 0 if st.session_state.search_type == 'News' else 
st.session_state.temperature comprehension_grade = st.session_state.comprehension_grade search_type = st.session_state.search_type @@ -409,7 +430,7 @@ def api_search(): max_tokens = data.get('max_tokens', default_max_tokens) summary_length = data.get('summary_length', default_summary_length) model = data.get('model', 'mixtral-8x7b-32768') - temperature = data.get('temperature', 0.5) + temperature = data.get('temperature', 0.0) comprehension_grade = data.get('comprehension_grade', 8) search_type = data.get('search_type', 'web').lower() # Default to 'web' if not provided @@ -481,7 +502,7 @@ def print_startup_message(): "max_tokens": 4096, "summary_length": 200, "model": "mixtral-8x7b-32768", - "temperature": 0.5, + "temperature": 0.0, "comprehension_grade": 8, "search_type": "web" // Use "web" for web search or "news" for news search } diff --git a/agents/News_Agent.py b/agents/News_Agent.py index d62364c..dcf4e02 100644 --- a/agents/News_Agent.py +++ b/agents/News_Agent.py @@ -17,7 +17,7 @@ def log_debug(message): print(f"Debug: {message}") class News_Agent(Base_Agent): - def __init__(self, api_key, provider_name='groq', num_results=10, max_tokens=4096, model="mixtral-8x7b-32768", temperature=0.5, comprehension_grade=8): + def __init__(self, api_key, provider_name='groq', num_results=10, max_tokens=4096, model="mixtral-8x7b-32768", temperature=0.0, comprehension_grade=8): log_debug(f"Initializing News_Agent with provider_name: {provider_name}, num_results: {num_results}, max_tokens: {max_tokens}, model: {model}, temperature: {temperature}, comprehension_grade: {comprehension_grade}") if not api_key: @@ -60,7 +60,7 @@ def _perform_news_search(self, query: str) -> List[Dict[str, Any]]: log_debug(f"Performing news search with query: {query} and num_results: {self.num_results}") encoded_query = quote_plus(query) - base_url = f'https://www.bing.com/news/search?q={encoded_query}&qft=interval%3d"7"&qft=sortbydate' + base_url = 
f'https://www.bing.com/news/search?q={encoded_query}&qft=interval%3d"7"&qft=sortbydate%3d"1" ' headers = { 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36', @@ -139,49 +139,50 @@ def _create_summary_prompt(self, content: str, url: str) -> str: log_debug(f"Selected grade description: {grade_description}") return f""" - Summarize the following news content from {url} for {grade_description}: - {content[:6000]} # Limit content to first 6000 characters - - Your task is to provide a comprehensive and informative synopsis of the main subject matter, along with an SEO-optimized headline. Follow these guidelines: - - 1. Generate an SEO-optimized headline that: - - Captures user interest without sensationalism - - Accurately represents the main topic - - Uses relevant keywords - - Is concise (ideally 50-60 characters) - - Maintains professionalism - - Does not begin with anything akin to "Imagine" or "Picture this" - - 2. Format your headline exactly as follows: - HEADLINE: [Your SEO-optimized headline here] - - 3. Write your summary using the inverted pyramid style: - - Start with a strong lede (opening sentence) that entices readers and summarizes the most crucial information - - Present the most important information first - - Follow with supporting details and context - - End with the least essential information - - Don't mention the parts of the pyramid. Just follow the structure. No need to say "in conclusion" in the conclusion, for example. - - 4. Adjust the language complexity strictly targeted to the reading level for {grade_description}. This means: - - Use vocabulary appropriate for this comprehension level - - Adjust sentence structure complexity accordingly - - Explain concepts in a way that would be clear to someone at this educational level - - Do not specifically mention the target's age or grade level in the summary response - - 5. 
Clearly explain the main topic or discovery being discussed - 6. Highlight key points, findings, or arguments presented in the content - 7. Provide relevant context or background information that helps understand the topic - 8. Mention any significant implications, applications, or future directions discussed - 9. If applicable, include important quotes or statistics that support the main points - 10. Never discuss or reference the reference material, article, video, source, or author in the summary - - Use a neutral, journalistic tone, and ensure that you're reporting the facts as presented in the content, not adding personal opinions or speculation. - - Format your response as follows: - HEADLINE: [Your SEO-optimized headline here] - - [Your comprehensive summary here, following the inverted pyramid style] - """ + Summarize the following news content from {url} for {grade_description}: + {content} + + Your task is to provide a comprehensive and informative synopsis of the main subject matter, along with an SEO-optimized headline. The summary must stand alone, without mentioning the original source, its authors, or any references to articles, videos, or materials. Follow these guidelines: + + 1. Generate an SEO-optimized headline that: + - Captures user interest without sensationalism + - Accurately represents the main topic + - Uses relevant keywords + - Is concise (ideally 50-60 characters) + - Maintains professionalism + - Does not begin with anything akin to "Imagine" or "Picture this" + + 2. Format your headline exactly as follows: + HEADLINE: [Your SEO-optimized headline here] + + 3. Write your summary using the inverted pyramid style: + - Start with a strong lede (opening sentence) that entices readers and summarizes the most crucial information + - Present the most important information first + - Follow with supporting details and context + - End with the least essential information + - Don't mention the parts of the pyramid. Just follow the structure. 
No need to say "in conclusion." + + 4. Adjust the language complexity strictly targeted to the reading level for {grade_description}. This means: + - Use vocabulary appropriate for this comprehension level + - Adjust sentence structure complexity accordingly + - Explain concepts in a way that would be clear to someone at this educational level + - Do not specifically mention the target's age or grade level in the summary response + + 5. Clearly explain the main topic or discovery being discussed + 6. Highlight key points, findings, or arguments presented in the content + 7. Provide relevant context or background information that helps understand the topic + 8. Mention any significant implications, applications, or future directions discussed + 9. If applicable, include important quotes or statistics that support the main points + 10. **Never refer to the original article, source, author, publisher, or any media format**. The summary must be a complete stand-alone piece without attribution to external sources. + + Use a neutral, journalistic tone, and ensure that you're reporting the facts as presented in the content, not adding personal opinions or speculation. 
+ + Format your response as follows: + HEADLINE: [Your SEO-optimized headline here] + + [Your comprehensive summary here, following the inverted pyramid style] + """ + def _format_summary(self, summary: str, url: str) -> Dict[str, str]: parts = summary.split('\n', 1) diff --git a/agents/Web_Agent.py b/agents/Web_Agent.py index 8f589ad..0c022d2 100644 --- a/agents/Web_Agent.py +++ b/agents/Web_Agent.py @@ -59,7 +59,7 @@ def sanitize_message(message): ProviderFactory = None class Web_Agent(Base_Agent): - def __init__(self, api_key, provider_name='groq', num_results=10, max_tokens=4096, model="mixtral-8x7b-32768", temperature=0.5, comprehension_grade=8, summary_length=300): + def __init__(self, api_key, provider_name='groq', num_results=10, max_tokens=4096, model="mixtral-8x7b-32768", temperature=0.0, comprehension_grade=8, summary_length=300): log_debug(f"Initializing Web_Agent with provider_name: {provider_name}, num_results: {num_results}, max_tokens: {max_tokens}, model: {model}, temperature: {temperature}, comprehension_grade: {comprehension_grade}, summary_length: {summary_length}") if not api_key: log_debug("API key is missing or empty") diff --git a/src/Groqqle.md b/src/Groqqle.md index 1408b2c..82e49b5 100644 --- a/src/Groqqle.md +++ b/src/Groqqle.md @@ -156,7 +156,7 @@ def main(api_key_arg: str = None, num_results: int = 10, max_tokens: int = 4096, if 'selected_model' not in st.session_state: st.session_state.selected_model = "mixtral-8x7b-32768" if 'temperature' not in st.session_state: - st.session_state.temperature = 0.5 + st.session_state.temperature = 0.0 if 'comprehension_grade' not in st.session_state: st.session_state.comprehension_grade = 8 if 'context_window' not in st.session_state: @@ -412,7 +412,7 @@ def create_api_app(api_key_arg: str = None, default_num_results: int = 10, defau max_tokens = data.get('max_tokens', default_max_tokens) summary_length = data.get('summary_length', default_summary_length) model = data.get('model', 
'mixtral-8x7b-32768') - temperature = data.get('temperature', 0.5) + temperature = data.get('temperature', 0.0) comprehension_grade = data.get('comprehension_grade', 8) search_type = data.get('search_type', 'web').lower() # Default to 'web' if not provided @@ -484,7 +484,7 @@ if __name__ == "__main__": "max_tokens": 4096, "summary_length": 200, "model": "mixtral-8x7b-32768", - "temperature": 0.5, + "temperature": 0.0, "comprehension_grade": 8, "search_type": "web" // Use "web" for web search or "news" for news search } @@ -580,7 +580,7 @@ def log_debug(message): print(f"Debug: {message}") class News_Agent(Base_Agent): - def __init__(self, api_key, provider_name='groq', num_results=10, max_tokens=4096, model="mixtral-8x7b-32768", temperature=0.5, comprehension_grade=8): + def __init__(self, api_key, provider_name='groq', num_results=10, max_tokens=4096, model="mixtral-8x7b-32768", temperature=0.0, comprehension_grade=8): log_debug(f"Initializing News_Agent with provider_name: {provider_name}, num_results: {num_results}, max_tokens: {max_tokens}, model: {model}, temperature: {temperature}, comprehension_grade: {comprehension_grade}") if not api_key: @@ -843,7 +843,7 @@ except ImportError as e: ProviderFactory = None class Web_Agent(Base_Agent): - def __init__(self, api_key, provider_name='groq', num_results=10, max_tokens=4096, model="mixtral-8x7b-32768", temperature=0.5, comprehension_grade=8, summary_length=300): + def __init__(self, api_key, provider_name='groq', num_results=10, max_tokens=4096, model="mixtral-8x7b-32768", temperature=0.0, comprehension_grade=8, summary_length=300): log_debug(f"Initializing Web_Agent with provider_name: {provider_name}, num_results: {num_results}, max_tokens: {max_tokens}, model: {model}, temperature: {temperature}, comprehension_grade: {comprehension_grade}, summary_length: {summary_length}") if not api_key: log_debug("API key is missing or empty") @@ -1424,7 +1424,7 @@ def WebGetContents_Tool(URL): headers = { 'User-Agent': 
'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36', 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,/;q=0.8', - 'Accept-Language': 'en-US,en;q=0.5', + 'Accept-Language': 'en-US,en;q=0.5', 'Referer': 'https://www.google.com/', 'DNT': '1', 'Connection': 'keep-alive', @@ -1584,7 +1584,7 @@ class WebGetStocks_Tool(Base_Tool): headers = { "User-Agent": random.choice(user_agents), "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8", - "Accept-Language": "en-US,en;q=0.5", + "Accept-Language": "en-US,en;q=0.5", "Accept-Encoding": "gzip, deflate, br", "DNT": "1", "Connection": "keep-alive", @@ -1782,7 +1782,7 @@ def WebSearch_Tool(query: str, num_results: int = 10): headers = { 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36', 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,/;q=0.8', - 'Accept-Language': 'en-US,en;q=0.5', + 'Accept-Language': 'en-US,en;q=0.5', 'Referer': 'https://www.google.com/', 'DNT': '1', 'Connection': 'keep-alive', diff --git a/tools/web_tools/WebGetContents_Tool.py b/tools/web_tools/WebGetContents_Tool.py index 1bbe2cd..567a0aa 100644 --- a/tools/web_tools/WebGetContents_Tool.py +++ b/tools/web_tools/WebGetContents_Tool.py @@ -15,7 +15,7 @@ def WebGetContents_Tool(URL): headers = { 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36', 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,/;q=0.8', - 'Accept-Language': 'en-US,en;q=0.5', + 'Accept-Language': 'en-US,en;q=0.5', 'Referer': 'https://www.google.com/', 'DNT': '1', 'Connection': 'keep-alive', diff --git a/tools/web_tools/WebGetStocks_Tool.py b/tools/web_tools/WebGetStocks_Tool.py index b2c529f..0b6a916 100644 --- a/tools/web_tools/WebGetStocks_Tool.py +++ 
b/tools/web_tools/WebGetStocks_Tool.py @@ -34,7 +34,7 @@ def execute(self, symbol: str) -> Optional[Dict[str, str]]: headers = { "User-Agent": random.choice(user_agents), "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8", - "Accept-Language": "en-US,en;q=0.5", + "Accept-Language": "en-US,en;q=0.5", "Accept-Encoding": "gzip, deflate, br", "DNT": "1", "Connection": "keep-alive", diff --git a/tools/web_tools/WebSearch_Tool.py b/tools/web_tools/WebSearch_Tool.py index d94dbc4..c504b8a 100644 --- a/tools/web_tools/WebSearch_Tool.py +++ b/tools/web_tools/WebSearch_Tool.py @@ -14,7 +14,7 @@ def WebSearch_Tool(query: str, num_results: int = 10): headers = { 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36', 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,/;q=0.8', - 'Accept-Language': 'en-US,en;q=0.5', + 'Accept-Language': 'en-US,en;q=0.5', 'Referer': 'https://www.google.com/', 'DNT': '1', 'Connection': 'keep-alive', diff --git a/tools/web_tools/__pycache__/WebGetContents_Tool.cpython-311.pyc b/tools/web_tools/__pycache__/WebGetContents_Tool.cpython-311.pyc index 34ec064..d5d9600 100644 Binary files a/tools/web_tools/__pycache__/WebGetContents_Tool.cpython-311.pyc and b/tools/web_tools/__pycache__/WebGetContents_Tool.cpython-311.pyc differ diff --git a/tools/web_tools/__pycache__/WebSearch_Tool.cpython-311.pyc b/tools/web_tools/__pycache__/WebSearch_Tool.cpython-311.pyc index f871ed0..7bb9cef 100644 Binary files a/tools/web_tools/__pycache__/WebSearch_Tool.cpython-311.pyc and b/tools/web_tools/__pycache__/WebSearch_Tool.cpython-311.pyc differ