Skip to content

Commit

Permalink
add configurable temperature metaparameter
Browse files Browse the repository at this point in the history
  • Loading branch information
Dmitriusan committed Oct 27, 2023
1 parent 2b710b0 commit 7ef7487
Show file tree
Hide file tree
Showing 6 changed files with 23 additions and 8 deletions.
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ consists of:
The script splits source code files into chunks (trying to split between methods), uploads these files recursively to
GPT and asks it to scan the code for vulnerabilities. Findings are then grouped and pretty-printed. Note: depending
on prompt and model used, findings may be too paranoid and consist almost entirely of false positives. You may
want to adjust `prompts/vulnerabilities/analyze` prompts to your case
want to adjust the temperature parameter and `prompts/vulnerabilities/analyze` prompts to your case
## Usage
```shell
export OPENAI_API_KEY=<your API key>
Expand Down
5 changes: 4 additions & 1 deletion app/model/prompt.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,21 +5,24 @@ class Prompt:
Attributes:
system_prompt (str or None): The system user role prompt. Can be None.
user_prompt (str or None): The user role prompt. Can be None.
temperature (float): The temperature parameter for the prompt. Default is 1.0.
"""

def __init__(self, system_prompt=None, user_prompt=None, functions=None,
             function_call=None, temperature=1.0):
    """
    Initializes a new Prompt object with user and system prompts.

    Args:
        system_prompt (str or None): The system user role prompt. Can be None.
        user_prompt (str or None): The user role prompt. Can be None.
        functions (list or None): Function definitions forwarded to the chat
            completion request. Can be None.
        function_call (dict or str or None): Function-call directive forwarded
            to the chat completion request (e.g. {"name": ...}). Can be None.
        temperature (float): Sampling temperature used when this prompt is
            sent to the completion API. Default is 1.0.
    """
    self.system_prompt = system_prompt
    self.user_prompt = user_prompt
    self.functions = functions
    self.function_call = function_call
    self.temperature = temperature

def __str__(self):
"""
Expand Down
4 changes: 2 additions & 2 deletions app/openai_util.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,14 +21,14 @@ def set_api_key():
# Function to generate code using OpenAI
# @retry(wait=wait_random_exponential(multiplier=1, max=40),
# stop=stop_after_attempt(3), retry=retry_if_not_exception_type(openai.InvalidRequestError)))
def get_completion(conversation: Conversation):
def get_completion(conversation: Conversation, temperature=1.0):
# https://help.openai.com/en/articles/7042661-chatgpt-api-transition-guide
completion = openai.ChatCompletion.create(
model="gpt-3.5-turbo",
messages=conversation.to_messages(),
functions=conversation.history[-1].prompt.functions,
function_call=conversation.history[-1].prompt.function_call,
temperature=1,
temperature=temperature,
max_tokens=1300,
top_p=1,
frequency_penalty=0,
Expand Down
3 changes: 3 additions & 0 deletions prompts/vulnerabilities/analyze/metaparameters.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
{
"temperature": 0.5
}
14 changes: 11 additions & 3 deletions vuln_scanner/prompt_generator.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,8 +22,16 @@ def vuln_search_prompt(target_file, code_fragment):
code_fragment=code_fragment,
)

functions_file = jinja_env.get_template('analyze/functions.json').filename
with open(functions_file, 'r') as ff:
metaparameters_file_path = jinja_env.get_template(
'analyze/metaparameters.json').filename
with open(metaparameters_file_path, 'r') as json_file:
data = json.load(json_file)
temperature = data.get("temperature", 1.0)


functions_file_path = jinja_env.get_template('analyze/functions.json').filename
with open(functions_file_path, 'r') as ff:
functions = json.load(ff)

return Prompt(system_prompt, user_prompt, functions, {"name": "store_results"})
return Prompt(system_prompt, user_prompt, functions,
{"name": "store_results"}, temperature)
3 changes: 2 additions & 1 deletion vuln_scanner/uploader.py
Original file line number Diff line number Diff line change
Expand Up @@ -98,7 +98,8 @@ def complete(context, prompt):
# print(f"Prompt: {prompt}")
# pretty_print_conversation(prompt.to_messages())

completion_response = openai_util.get_completion(context.conversation)
completion_response = openai_util.get_completion(context.conversation,
prompt.temperature)

context.prompt_tokens_used += (
completion_response)['usage']["prompt_tokens"]
Expand Down

0 comments on commit 7ef7487

Please sign in to comment.