{ "cells": [ { "cell_type": "code", "execution_count": 2, "id": "609b9e9d-7f2b-49c3-b08f-97e6e5259242", "metadata": {}, "outputs": [ { "name": "stdin", "output_type": "stream", "text": [ "Enter the professor's name: Ali Asadpour\n", "Enter your research topic: AI in architecture\n", "Enter your name: Masih Moafi\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "C:\\Users\\Josep\\anaconda3\\envs\\myenv\\lib\\site-packages\\transformers\\generation\\configuration_utils.py:492: UserWarning: `do_sample` is set to `False`. However, `temperature` is set to `0.7` -- this flag is only used in sample-based generation modes. You should set `do_sample=True` or unset `temperature`.\n", " warnings.warn(\n", "C:\\Users\\Josep\\anaconda3\\envs\\myenv\\lib\\site-packages\\transformers\\generation\\configuration_utils.py:497: UserWarning: `do_sample` is set to `False`. However, `top_p` is set to `0.9` -- this flag is only used in sample-based generation modes. You should set `do_sample=True` or unset `top_p`.\n", " warnings.warn(\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "\n", "Generated Email:\n", "\n", "Dear Professor Ali Asadpour,\n", "\n", " I am writing to express my interest in pursuing research under your guidance. My research topic revolves around AI in architecture.\n", "\n", " I believe that your work in this area is groundbreaking, and I am eager to contribute to your ongoing projects.\n", "\n", " Best regards,\n", " Masih Moafi\n", " _________________________________________\n" ] } ], "source": [ "from transformers import AutoModelForCausalLM, AutoTokenizer\n", "\n", "# Load the tokenizer and model for GPT-J\n", "tokenizer = AutoTokenizer.from_pretrained(\"EleutherAI/gpt-j-6B\")\n", "model = AutoModelForCausalLM.from_pretrained(\"EleutherAI/gpt-j-6B\")\n", "\n", "def generate_email(professor_name, research_topic, user_name):\n", " \"\"\"\n", " Generate a professional and customizable email using GPT-J.\n", " Args:\n", " professor_name (str): The professor's name.\n", " research_topic (str): The user's research topic.\n", " user_name (str): The user's name.\n", " Returns:\n", " str: The generated email text.\n", " \"\"\"\n", " # Email template\n", " prompt = f\"\"\"\n", " Dear Professor {professor_name},\n", "\n", " I am writing to express my interest in pursuing research under your guidance. 
My research topic revolves around {research_topic}.\n", "\n", " I believe that your work in this area is groundbreaking, and I am eager to contribute to your ongoing projects.\n", "\n", " Best regards,\n", " {user_name}\n", " \"\"\"\n", "\n", " # Encode input\n", " input_ids = tokenizer.encode(prompt, return_tensors=\"pt\")\n", "\n", " # Generate email with controlled randomness\n", " output = model.generate(\n", " input_ids,\n", " max_length=len(input_ids[0]) + 100, # Slightly extend length to avoid truncation\n", " do_sample=False, # Disable sampling for deterministic output\n", " temperature=0.7, # Control output randomness\n", " top_p=0.9, # Use nucleus sampling for coherent generation\n", " pad_token_id=tokenizer.eos_token_id # Prevent padding issues\n", " )\n", "\n", " # Decode and return the text\n", " generated_email = tokenizer.decode(output[0], skip_special_tokens=True)\n", " return generated_email.strip()\n", "\n", "# Input data\n", "professor_name = input(\"Enter the professor's name: \")\n", "research_topic = input(\"Enter your research topic: \")\n", "user_name = input(\"Enter your name: \")\n", "\n", "# Generate and print the email\n", "email = generate_email(professor_name, research_topic, user_name)\n", "print(\"\\nGenerated Email:\\n\")\n", "print(email)\n" ] }, { "cell_type": "code", "execution_count": null, "id": "f575bf56-5146-49df-803a-f1ceecdbc963", "metadata": {}, "outputs": [], "source": [] } ], "metadata": { "kernelspec": { "display_name": "Python 3 (ipykernel)", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.8.19" } }, "nbformat": 4, "nbformat_minor": 5 }