Update Email Read Allowed.ipynb
parent 79558a65ce
commit e1fff1a36d
@@ -1,137 +1,230 @@
{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "1875b60b-7d4c-4e3a-9751-c9432c395590",
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "Some weights of the model checkpoint at EleutherAI/gpt-j-6B were not used when initializing GPTJForCausalLM: ['transformer.h.0.attn.bias', 'transformer.h.0.attn.masked_bias', 'transformer.h.1.attn.bias', 'transformer.h.1.attn.masked_bias', 'transformer.h.10.attn.bias', 'transformer.h.10.attn.masked_bias', 'transformer.h.11.attn.bias', 'transformer.h.11.attn.masked_bias', 'transformer.h.12.attn.bias', 'transformer.h.12.attn.masked_bias', 'transformer.h.13.attn.bias', 'transformer.h.13.attn.masked_bias', 'transformer.h.14.attn.bias', 'transformer.h.14.attn.masked_bias', 'transformer.h.15.attn.bias', 'transformer.h.15.attn.masked_bias', 'transformer.h.16.attn.bias', 'transformer.h.16.attn.masked_bias', 'transformer.h.17.attn.bias', 'transformer.h.17.attn.masked_bias', 'transformer.h.18.attn.bias', 'transformer.h.18.attn.masked_bias', 'transformer.h.19.attn.bias', 'transformer.h.19.attn.masked_bias', 'transformer.h.2.attn.bias', 'transformer.h.2.attn.masked_bias', 'transformer.h.20.attn.bias', 'transformer.h.20.attn.masked_bias', 'transformer.h.21.attn.bias', 'transformer.h.21.attn.masked_bias', 'transformer.h.22.attn.bias', 'transformer.h.22.attn.masked_bias', 'transformer.h.23.attn.bias', 'transformer.h.23.attn.masked_bias', 'transformer.h.24.attn.bias', 'transformer.h.24.attn.masked_bias', 'transformer.h.25.attn.bias', 'transformer.h.25.attn.masked_bias', 'transformer.h.26.attn.bias', 'transformer.h.26.attn.masked_bias', 'transformer.h.27.attn.bias', 'transformer.h.27.attn.masked_bias', 'transformer.h.3.attn.bias', 'transformer.h.3.attn.masked_bias', 'transformer.h.4.attn.bias', 'transformer.h.4.attn.masked_bias', 'transformer.h.5.attn.bias', 'transformer.h.5.attn.masked_bias', 'transformer.h.6.attn.bias', 'transformer.h.6.attn.masked_bias', 'transformer.h.7.attn.bias', 'transformer.h.7.attn.masked_bias', 'transformer.h.8.attn.bias', 'transformer.h.8.attn.masked_bias', 'transformer.h.9.attn.bias', 'transformer.h.9.attn.masked_bias']\n",
      "- This IS expected if you are initializing GPTJForCausalLM from the checkpoint of a model trained on another task or with another architecture (e.g. initializing a BertForSequenceClassification model from a BertForPreTraining model).\n",
      "- This IS NOT expected if you are initializing GPTJForCausalLM from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).\n"
     ]
    },
    {
     "name": "stdin",
     "output_type": "stream",
     "text": [
      "Enter the professor's name: Asadpour\n",
      "Enter your research topic: AI\n",
      "Enter your name: Masih\n"
     ]
'use client';

import { useState, useEffect, useRef } from 'react';

type Message = {
  text: string;
  sender: 'user' | 'bot';
};

export default function Chatbot() {
  const [isOpen, setIsOpen] = useState(false);
  const [messages, setMessages] = useState<Message[]>([]);
  const [inputText, setInputText] = useState('');
  const [isLoading, setIsLoading] = useState(false);
  const chatContainerRef = useRef<HTMLDivElement>(null);

  const handleSendMessage = async () => {
    if (!inputText.trim() || isLoading) return;

    try {
      setIsLoading(true);
      const userMessage = inputText.trim();
      setMessages(prev => [...prev, { text: userMessage, sender: 'user' }]);
      setInputText('');

      // Directly call Ollama API via local tunnel
      const response = await fetch('https://joe-ollama.loca.lt/api/generate', {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
          'Authorization': 'Basic ' + btoa('username:password') // Add your auth
        },
        body: JSON.stringify({
          model: 'llama3',
          prompt: userMessage,
          stream: false,
          options: {
            temperature: 0.7,
            num_predict: 500 // cap on new tokens; Ollama's option is "num_predict" (it ignores "max_tokens")
          }
        }),
      });

      if (!response.ok) throw new Error('Failed to get response');

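      // Note: with stream set to false, Ollama's /api/generate should return a single JSON object
      // whose "response" field holds the full completion (alongside fields such as "model" and "done");
      // that is the shape assumed when data.response is read below.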
      const data = await response.json();
      setMessages(prev => [...prev, { text: data.response, sender: 'bot' }]);
    } catch (error) {
      console.error('Chat error:', error);
      setMessages(prev => [...prev, {
        text: "Sorry, I'm having trouble connecting. Please try again later.",
        sender: 'bot'
      }]);
    } finally {
      setIsLoading(false);
    }
   ],
   "source": [
    "from transformers import AutoModelForCausalLM, AutoTokenizer\n",
    "from gtts import gTTS\n",
    "from playsound import playsound\n",
    "import os\n",
    "\n",
    "# Load the tokenizer and model for GPT-J\n",
    "tokenizer = AutoTokenizer.from_pretrained(\"EleutherAI/gpt-j-6B\")\n",
    "model = AutoModelForCausalLM.from_pretrained(\"EleutherAI/gpt-j-6B\")\n",
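    "# Note: GPT-J-6B is a large checkpoint (roughly 24 GB in float32), so downloading and loading it can take a while;\n",
    "# the 'Some weights ... were not used' warning above is expected for GPT-J and is safe to ignore.\n",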
"\n",
|
||||
"# Set the padding token to the EOS token\n",
|
||||
"tokenizer.pad_token = tokenizer.eos_token\n",
|
||||
"\n",
|
||||
"def generate_email(professor_name, research_topic, user_name):\n",
|
||||
" # Email template\n",
|
||||
" prompt = f\"\"\"\n",
|
||||
" Dear Professor {professor_name},\n",
|
||||
"\n",
|
||||
" I am writing to express my interest in pursuing research under your guidance. My research topic revolves around {research_topic}.\n",
|
||||
"\n",
|
||||
" I believe that your work in this area is groundbreaking, and I am eager to contribute to your ongoing projects.\n",
|
||||
"\n",
|
||||
" Best regards,\n",
|
||||
" {user_name}\n",
|
||||
" \"\"\"\n",
|
||||
" # Tokenize input\n",
|
||||
" inputs = tokenizer(prompt, return_tensors=\"pt\", truncation=True, padding=True)\n",
|
||||
" # Generate email with controlled randomness\n",
|
||||
" output = model.generate(\n",
|
||||
" inputs[\"input_ids\"],\n",
|
||||
" attention_mask=inputs[\"attention_mask\"],\n",
|
||||
" max_length=len(inputs[\"input_ids\"][0]) + 100,\n",
|
||||
" do_sample=True, # Set to True to use temperature and top_p\n",
|
||||
" temperature=0.7,\n",
|
||||
" top_p=0.9,\n",
|
||||
" pad_token_id=tokenizer.eos_token_id\n",
|
||||
" )\n",
|
||||
" # Decode and return the text\n",
|
||||
" generated_email = tokenizer.decode(output[0], skip_special_tokens=True)\n",
|
||||
" return generated_email.strip()\n",
|
||||
"\n",
|
||||
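    "# Note: gTTS synthesizes speech via Google's online text-to-speech service, so this step needs an internet connection.\n",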
"def text_to_speech(text, output_file=\"email.mp3\", lang='en'):\n",
|
||||
" try:\n",
|
||||
" if os.path.exists(output_file):\n",
|
||||
" os.remove(output_file)\n",
|
||||
" tts = gTTS(text, lang=lang)\n",
|
||||
" tts.save(output_file)\n",
|
||||
" print(f\"Speech saved to {output_file}\")\n",
|
||||
" except Exception as e:\n",
|
||||
" print(f\"Error generating speech: {e}\")\n",
|
||||
"\n",
|
||||
"def play_sound(file_path):\n",
|
||||
" if not os.path.exists(file_path):\n",
|
||||
" print(\"File not found.\")\n",
|
||||
" return\n",
|
||||
" try:\n",
|
||||
" # Using playsound\n",
|
||||
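    "        # Note: playsound can be unreliable on some platforms; if playback fails, pinning playsound==1.2.2 is a common workaround\n",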
" playsound(file_path)\n",
|
||||
" print(\"\\nEmail is being read aloud.\")\n",
|
||||
" except Exception as e:\n",
|
||||
" print(f\"Error playing sound with playsound: {e}\")\n",
|
||||
"\n",
|
||||
"# Input data\n",
|
||||
"professor_name = input(\"Enter the professor's name: \")\n",
|
||||
"research_topic = input(\"Enter your research topic: \")\n",
|
||||
"user_name = input(\"Enter your name: \")\n",
|
||||
"\n",
|
||||
"# Generate and print the email\n",
|
||||
"email = generate_email(professor_name, research_topic, user_name)\n",
|
||||
"print(\"\\nGenerated Email:\\n\")\n",
|
||||
"print(email)\n",
|
||||
"\n",
|
||||
"# Convert the email to speech\n",
|
||||
"text_to_speech(email)\n",
|
||||
"\n",
|
||||
"# Play the generated speech\n",
|
||||
"play_sound(\"email.mp3\")"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "89bc527f-4f37-44d3-8454-52a980cf6038",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3 (ipykernel)",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.8.19"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 5
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
if (chatContainerRef.current) {
|
||||
chatContainerRef.current.scrollTop = chatContainerRef.current.scrollHeight;
|
||||
}
|
||||
}, [messages]);
|
||||
|
||||
return (
|
||||
<div style={{ position: 'fixed', bottom: '20px', right: '29.6px', zIndex: 1000 }}>
|
||||
{isOpen && (
|
||||
<div style={chatWindowStyle}>
|
||||
<div style={headerStyle}>
|
||||
<div style={titleStyle}>Chat with Joe's AI</div>
|
||||
<button onClick={() => setIsOpen(false)} style={closeButtonStyle}>
|
||||
×
|
||||
</button>
|
||||
</div>
|
||||
|
||||
<div ref={chatContainerRef} style={messageContainerStyle}>
|
||||
{messages.map((msg, index) => (
|
||||
<div key={index} style={messageBubbleStyle(msg.sender)}>
|
||||
<span style={textStyle(msg.sender)}>
|
||||
{msg.text}
|
||||
</span>
|
||||
</div>
|
||||
))}
|
||||
{isLoading && (
|
||||
<div style={loadingStyle}>
|
||||
<div className="dot-flashing" />
|
||||
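                {/* "dot-flashing" assumes a loading-dots animation class defined in global CSS; it is not part of this file */}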
              </div>
            )}
          </div>

          <div style={inputContainerStyle}>
            <input
              type="text"
              value={inputText}
              onChange={(e) => setInputText(e.target.value)}
              onKeyDown={(e) => e.key === 'Enter' && handleSendMessage()} // onKeyPress is deprecated in React; onKeyDown behaves the same here
              style={inputStyle}
              placeholder="Ask me anything..."
              disabled={isLoading}
            />
            <button
              onClick={handleSendMessage}
              style={sendButtonStyle}
              disabled={isLoading}
            >
              {isLoading ? '...' : 'Send'}
            </button>
          </div>
        </div>
      )}

      {!isOpen && (
        <button
          onClick={() => setIsOpen(true)}
          style={toggleButtonStyle}
        >
          💬
        </button>
      )}
    </div>
  );
}

// Style constants
const chatWindowStyle = {
  width: '350px',
  border: '1px solid #e5e7eb',
  borderRadius: '12px',
  padding: '16px',
  backgroundColor: '#ffffff',
  boxShadow: '0 4px 6px rgba(0, 0, 0, 0.1)',
};

const headerStyle = {
  display: 'flex',
  justifyContent: 'space-between',
  alignItems: 'center',
  marginBottom: '16px',
};

const titleStyle = {
  fontSize: '18px',
  fontWeight: '600',
  color: '#1f2937',
};

const closeButtonStyle = {
  padding: '8px',
  borderRadius: '50%',
  border: 'none',
  backgroundColor: '#31616c',
  color: '#fff',
  cursor: 'pointer',
  fontWeight: '600',
  width: '32px',
  height: '32px',
  display: 'flex',
  alignItems: 'center',
  justifyContent: 'center',
};

const messageContainerStyle = {
  height: '330px',
  overflowY: 'auto',
  marginBottom: '16px',
  padding: '8px',
  backgroundColor: '#f3f4f6',
  borderRadius: '8px',
};

const messageBubbleStyle = (sender: 'user' | 'bot') => ({
  textAlign: sender === 'user' ? 'right' : 'left',
  margin: '8px 0',
});

const textStyle = (sender: 'user' | 'bot') => ({
  display: 'inline-block',
  padding: '8px 12px',
  borderRadius: '12px',
  backgroundColor: sender === 'user' ? '#31616c' : '#e5e7eb',
  color: sender === 'user' ? '#fff' : '#1f2937',
  maxWidth: '80%',
  wordWrap: 'break-word' as const,
});

const inputContainerStyle = {
  display: 'flex',
  gap: '8px',
};

const inputStyle = {
  flex: 1,
  padding: '8px',
  borderRadius: '8px',
  border: '1px solid #e5e7eb',
  backgroundColor: '#ffffff',
  color: '#1f2937',
  outline: 'none',
};

const sendButtonStyle = {
  padding: '8px 16px',
  borderRadius: '8px',
  border: 'none',
  backgroundColor: '#31616c',
  color: '#fff',
  cursor: 'pointer',
  fontWeight: '600',
};

const toggleButtonStyle = {
  padding: '12px',
  borderRadius: '50%',
  border: 'none',
  backgroundColor: '#31616c',
  color: '#fff',
  cursor: 'pointer',
  boxShadow: '0 4px 6px rgba(0, 0, 0, 0.1)',
  display: 'flex',
  alignItems: 'center',
  justifyContent: 'center',
  width: '48px',
  height: '48px',
};

const loadingStyle = {
  display: 'flex',
  justifyContent: 'center',
  padding: '8px',
};