Personality:
# Install the transformers library
# !pip install transformers
import json
from transformers import GPT2LMHeadModel, GPT2Tokenizer


class SimpleChatbot:
    """Minimal GPT-2 chatbot that answers a single prompt."""

    def __init__(self):
        self.tokenizer = GPT2Tokenizer.from_pretrained('gpt2')
        self.model = GPT2LMHeadModel.from_pretrained('gpt2')

    def generate_response(self, prompt):
        inputs = self.tokenizer.encode(prompt, return_tensors='pt')
        outputs = self.model.generate(inputs, max_length=100, num_return_sequences=1)
        return self.tokenizer.decode(outputs[0], skip_special_tokens=True)


# Create an instance of the chatbot and simulate a conversation
chatbot = SimpleChatbot()
user_input = "Hello, how are you?"
print("Bot:", chatbot.generate_response(user_input))


class AdaptiveChatbot(SimpleChatbot):
    """Extends SimpleChatbot with a rolling conversation history used as context."""

    def __init__(self):
        super().__init__()
        self.conversation_history = []

    def generate_response(self, prompt):
        self.conversation_history.append(prompt)
        # Use the last 5 interactions as context
        context = " ".join(self.conversation_history[-5:])
        inputs = self.tokenizer.encode(context, return_tensors='pt')
        outputs = self.model.generate(inputs, max_length=100, num_return_sequences=1)
        response = self.tokenizer.decode(outputs[0], skip_special_tokens=True)
        self.conversation_history.append(response)
        return response


# Create an instance of the adaptive chatbot and simulate a conversation
adaptive_chatbot = AdaptiveChatbot()
user_input = "Hello, how are you?"
print("Bot:", adaptive_chatbot.generate_response(user_input))


class ConfigurableChatbot(AdaptiveChatbot):
    """Adds persistent, rule-based reply overrides loaded from a JSON config file."""

    def __init__(self, config_file='config.json'):
        super().__init__()
        self.config_file = config_file
        self.load_config()

    def load_config(self):
        try:
            with open(self.config_file, 'r') as file:
                self.config = json.load(file)
        except FileNotFoundError:
            self.config = {}

    def save_config(self):
        with open(self.config_file, 'w') as file:
            json.dump(self.config, file)

    def generate_response(self, prompt):
        response = super().generate_response(prompt)
        # Apply custom behavior from config: if a rule's trigger appears in the
        # prompt, return the configured reply instead of the generated text
        for rule, reply in self.config.get("rules", {}).items():
            if rule in prompt:
                response = reply
                break
        return response

    def update_behavior(self, rule, reply):
        self.config.setdefault("rules", {})[rule] = reply
        self.save_config()


# Create an instance of the configurable chatbot and simulate a conversation
configurable_chatbot = ConfigurableChatbot()
user_input = "Hello, how are you?"
print("Bot:", configurable_chatbot.generate_response(user_input))

# Update the chatbot behavior; the new rule is persisted to config.json
configurable_chatbot.update_behavior("how are you", "I'm just a bot, but I'm doing great!")
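For reference, a sketch of what the persisted rules look like after the update_behavior call above, assuming config.json did not exist beforehand; the file name and structure follow from save_config:

# Quick check of the persisted rules (hypothetical helper, not part of the card above)
import json
with open('config.json') as f:
    print(json.load(f))
# Expected output, given the single rule added above:
# {'rules': {'how are you': "I'm just a bot, but I'm doing great!"}}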
Scenario: Simple chat with an AI
First Message: Hi! My name is Jason, I'm an AI.
Example Dialogs: