import gradio as gr
from huggingface_hub import InferenceClient
from duckduckgo_search import DDGS
import re

client = InferenceClient("Pinkstack/Superthoughts-lite-v1")


def format_search_results(query, results, result_type):
    formatted = f"{result_type} search results for '{query}':\n"
    for i, result in enumerate(results):
        title = result.get('title', 'No title')
        description = result.get('body', '') or result.get('snippet', '') or 'No description'
        url = result.get('href', '') or result.get('url', '') or 'No URL'
        formatted += f"{i+1}. [{title}]({url})\n{description}\n\n"
    return formatted


def extract_key_phrases(message):
    words = re.split(r'[,.!?;:\s]+', message.strip())
    phrases = [message]
    for i in range(len(words) - 1):
        if len(words[i]) > 3 and len(words[i+1]) > 3:
            phrases.append(f"{words[i]} {words[i+1]}")
    return phrases[:3]


def respond(
    message,
    history: list[tuple[str, str]],
    system_message,
    max_tokens,
    temperature,
    top_p,
    search_option,
):
    search_text = ""
    if search_option != "No search":
        with DDGS() as ddgs:
            if search_option == "Normal search":
                web_results = ddgs.text(message, max_results=3)
                search_text = format_search_results(message, web_results, "Web")
            elif search_option == "Deep research":
                queries = extract_key_phrases(message)
                search_texts = []
                for query in queries:
                    web_results = ddgs.text(query, max_results=3)
                    news_results = ddgs.news(query, max_results=2)
                    search_texts.append(format_search_results(query, web_results, "Web"))
                    search_texts.append(format_search_results(query, news_results, "News"))
                search_text = "\n".join(search_texts)
        message += "\n\n**Search Results:**\n" + search_text

    messages = [{"role": "system", "content": system_message}]
    for user_msg, assistant_msg in history:
        if user_msg:
            messages.append({"role": "user", "content": user_msg})
        if assistant_msg:
            messages.append({"role": "assistant", "content": assistant_msg})
    messages.append({"role": "user", "content": message})

    response = ""
    for msg in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        token = msg.choices[0].delta.content
        if token:
            response += token
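
    # Note: with stream=True, chat_completion() yields incremental deltas whose
    # content field can be empty or None on some chunks, which is why the
    # `if token:` guard above is needed. The accumulated text is cleaned up and
    # converted to HTML below, then yielded to the chat UI.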

    # Clean response: normalize line breaks to prevent extra <br> tags
    response = re.sub(r'\n\s*\n+', '\n', response.strip())

    # Process response to convert tags to HTML divs
    formatted_response = response
    formatted_response = formatted_response.replace("<think>", '<div class="thinking">').replace("</think>", "</div>")
    formatted_response = formatted_response.replace("<output>", '<div class="output">').replace("</output>", "</div>")
    yield formatted_response


css = """
.thinking {
    background-color: #333;
    color: white;
    padding: 15px;
    border-radius: 8px;
    margin-bottom: 10px;
}
.output {
    background-color: #f0f0f0;
    color: black !important;
    padding: 15px;
    border-radius: 8px;
}
.output * {
    color: black !important;
}
.gr-chatbot {
    background-color: #f5f5f5;
    font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif;
}
.gr-chatbot .message {
    padding: 12px;
    border-radius: 12px;
    margin-bottom: 12px;
    max-width: 80%;
}
.gr-chatbot .message.user {
    background-color: #d1d5db;
    align-self: flex-end;
}
.gr-chatbot .message.assistant {
    background-color: #ffffff;
    align-self: flex-start;
    box-shadow: 0 1px 2px rgba(0,0,0,0.1);
}
.gr-chatbot .message.assistant * {
    color: inherit;
}
"""

system_message_default = """
Respond in the following format:
<think>
Your thought process here...
</think>
<output>
Your final answer here...
If search results are provided, you must cite relevant sources at the end of this section as a numbered list in the format:
1. [Title](URL)
2. [Title](URL)
</output>
"""

demo = gr.ChatInterface(
    respond,
    additional_inputs=[
        gr.Textbox(value=system_message_default, label="System message", lines=8),
        gr.Slider(minimum=1, maximum=4096, value=2048, step=1, label="Max new tokens"),
        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
        gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p"),
        gr.Dropdown(
            choices=["No search", "Normal search", "Deep research"],
            value="No search",
            label="Web search",
            info="Choose 'Deep research' for comprehensive web and news results.",
        ),
    ],
    css=css,
    title="Pinkchat - Superthoughts lite v1 (Just 1.7B parameters!)",
    description="""Chat with an AI that thinks step-by-step and has web search. BETA: Added deep research.""",
)

if __name__ == "__main__":
    demo.launch()