communex.module.example.gpt
import json
from enum import Enum
from os import getenv

from fastapi import HTTPException
from openai import OpenAI  # type: ignore

from communex.module.module import Module, endpoint
from communex.module.server import ModuleServer

OPENAI_API_KEY = getenv("OPENAI_API_KEY")


class OpenAIModels(str, Enum):
    three = "gpt-3.5-turbo"


class OpenAIModule(Module):
    def __init__(self) -> None:
        super().__init__()
        self.client = OpenAI(api_key=OPENAI_API_KEY)  # type: ignore

    @endpoint
    def prompt(self, text: str, model: OpenAIModels):
        response = self.client.chat.completions.create(  # type: ignore
            model=model,
            response_format={"type": "json_object"},
            messages=[
                {
                    "role": "system",
                    "content": "You are a helpful assistant designed to output JSON.",
                },
                {"role": "user", "content": text},
            ],
        )
        answers: list[dict[str, str]] = []
        for msg in response.choices:  # type: ignore
            finish_reason = msg.finish_reason  # type: ignore
            if finish_reason != "stop":
                raise HTTPException(418, finish_reason)
            content = msg.message.content  # type: ignore
            if content:
                answers.append(json.loads(content))  # type: ignore

        return {"Answer": answers}


if __name__ == "__main__":
    import uvicorn

    from communex.compat.key import classic_load_key

    model = OpenAIModule()
    key = classic_load_key("test")
    model_server = ModuleServer(model, key)
    app = model_server.get_fastapi_app()

    uvicorn.run(app, host="127.0.0.1", port=8000)
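The __main__ block above serves the module with the key named "test" on 127.0.0.1:8000. Below is a minimal sketch of serving it under a different key name and port, reusing only the calls shown above; "my-key" is a placeholder for a key that already exists in your local communex keystore.

import uvicorn

from communex.compat.key import classic_load_key
from communex.module.example.gpt import OpenAIModule
from communex.module.server import ModuleServer

key = classic_load_key("my-key")            # placeholder key name, assumed to exist locally
server = ModuleServer(OpenAIModule(), key)  # same wiring as the __main__ block above
uvicorn.run(server.get_fastapi_app(), host="0.0.0.0", port=8080)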
OPENAI_API_KEY = getenv("OPENAI_API_KEY")

Read from the environment at import time; None when the variable is not set.
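Because the key is read once at import time, it has to be present in the environment before this module is imported. A minimal sketch of that ordering; the key value is a placeholder.

import os

os.environ["OPENAI_API_KEY"] = "sk-placeholder"        # normally exported in your shell instead
from communex.module.example.gpt import OpenAIModule   # import happens after the key is set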
class OpenAIModels(builtins.str, enum.Enum):

A string-valued enumeration of the OpenAI chat model names accepted by the prompt endpoint.
three = <OpenAIModels.three: 'gpt-3.5-turbo'>
Inherited Members
- enum.Enum: name, value
- builtins.str: the standard string methods (encode, split, join, format, ...)
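Only gpt-3.5-turbo is enumerated in the shipped example. A sketch of adding a second model follows; the extra name is an assumption, and it must be a model that supports JSON mode, since the prompt endpoint requests response_format={"type": "json_object"}.

from enum import Enum

class OpenAIModels(str, Enum):
    three = "gpt-3.5-turbo"
    four_o = "gpt-4o"  # assumed model name, not part of the shipped example; must support JSON mode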
class OpenAIModule(Module):
    def __init__(self) -> None:
        super().__init__()
        self.client = OpenAI(api_key=OPENAI_API_KEY)  # type: ignore

    @endpoint
    def prompt(self, text: str, model: OpenAIModels):
        response = self.client.chat.completions.create(  # type: ignore
            model=model,
            response_format={"type": "json_object"},
            messages=[
                {
                    "role": "system",
                    "content": "You are a helpful assistant designed to output JSON.",
                },
                {"role": "user", "content": text},
            ],
        )
        answers: list[dict[str, str]] = []
        for msg in response.choices:  # type: ignore
            finish_reason = msg.finish_reason  # type: ignore
            if finish_reason != "stop":
                raise HTTPException(418, finish_reason)
            content = msg.message.content  # type: ignore
            if content:
                answers.append(json.loads(content))  # type: ignore

        return {"Answer": answers}
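A sketch of exercising the module in-process, without starting the HTTP server; it assumes OPENAI_API_KEY is set and that the @endpoint decorator leaves the method directly callable. The prompt text is illustrative only.

from communex.module.example.gpt import OpenAIModels, OpenAIModule

module = OpenAIModule()
result = module.prompt("List the first three prime numbers as JSON.", OpenAIModels.three)
print(result)  # {"Answer": [...]}, one parsed JSON object per returned choice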
@endpoint
def prompt(self, text: str, model: OpenAIModels):

Sends text to the selected OpenAI chat model with JSON-mode output enabled, parses the content of each returned choice, and returns them as {"Answer": [...]}. If any choice finishes for a reason other than "stop", it raises HTTPException(418, finish_reason).
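A sketch of handling that failure path in-process, reusing the module instance from the sketch above; the prompt text is illustrative only.

from fastapi import HTTPException

try:
    result = module.prompt("Write a very long essay as JSON.", OpenAIModels.three)
except HTTPException as exc:
    print(exc.status_code, exc.detail)  # 418 and the finish_reason reported by OpenAI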