-
-
Save hardikm9850/e7387d46f4f769e14651939445ae8c1d to your computer and use it in GitHub Desktop.
perfetto natural language query
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
GEMINI_API_KEY=
LOCAL_LLM_BASE_URL=
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| import argparse | |
| import os | |
| import google.generativeai as genai | |
| import ollama | |
| from dotenv import load_dotenv | |
| from perfetto.trace_processor import TraceProcessor | |
| load_dotenv() | |
def get_schema(tp):
    """Return a mapping of table name -> list of column names for the trace.

    Enumerates every table registered in sqlite_master, then reads each
    table's columns via ``PRAGMA table_info``.
    """
    table_rows = tp.query("select name from sqlite_master where type='table'")
    return {
        row.name: [
            col.name for col in tp.query(f"pragma table_info('{row.name}')")
        ]
        for row in table_rows
    }
def generate_sql(natural_language_query, schema, use_local_llm=False, model_name='llama2'):
    """Convert a natural language query into a Perfetto SQL query using an LLM.

    Args:
        natural_language_query: The user's question about the trace.
        schema: Mapping of table name -> column names, embedded in the prompt
            so the model knows what it can query.
        use_local_llm: When True, send the prompt to a local Ollama server;
            otherwise call the Gemini API (requires GEMINI_API_KEY).
        model_name: Ollama model name; ignored on the Gemini path.

    Returns:
        The generated SQL string with markdown code fences and surrounding
        whitespace stripped.

    Raises:
        RuntimeError: If the Gemini path is selected but GEMINI_API_KEY is
            not set in the environment.
    """
    prompt = f"""
    You are a Perfetto SQL expert. Your task is to convert a natural language query
    into a Perfetto SQL query.
    Here is the schema of the trace file:
    {schema}
    When joining `__intrinsic_thread` and `__intrinsic_process` tables, the join condition is `__intrinsic_thread.upid = __intrinsic_process.id`.
    Natural language query:
    {natural_language_query}
    Respond with only the Perfetto SQL query, and nothing else.
    """
    if use_local_llm:
        response = ollama.chat(
            model=model_name,
            messages=[
                {'role': 'user', 'content': prompt}
            ]
        )
        sql_query = response['message']['content']
    else:
        # Fail with an actionable message instead of a bare KeyError.
        api_key = os.environ.get("GEMINI_API_KEY")
        if not api_key:
            raise RuntimeError(
                "GEMINI_API_KEY is not set; export it or use --local with Ollama."
            )
        genai.configure(api_key=api_key)
        model = genai.GenerativeModel('gemini-1.5-flash')
        response = model.generate_content(prompt)
        sql_query = response.text
    # Normalize the response for BOTH paths: models often wrap SQL in
    # markdown fences and add leading/trailing newlines.
    sql_query = sql_query.replace("```sql", "").replace("```", "").strip()
    return sql_query
def main():
    """Parse CLI args, translate the query to SQL, run it, and print results."""
    parser = argparse.ArgumentParser(description="Query Perfetto traces using natural language.")
    parser.add_argument("trace_file", help="Path to the Perfetto trace file.")
    parser.add_argument("query", help="Natural language query.")
    parser.add_argument("--local", action="store_true", help="Use a local LLM instead of the Gemini API.")
    parser.add_argument("--model", default="llama2", help="The name of the model to use with Ollama.")
    args = parser.parse_args()

    if not os.path.exists(args.trace_file):
        print(f"Error: Trace file not found at {args.trace_file}")
        return

    tp = TraceProcessor(trace=args.trace_file)
    try:
        schema = get_schema(tp)
        print(f"schema: {schema}")

        sql_query = generate_sql(args.query, schema, use_local_llm=args.local, model_name=args.model)
        print(f"Generated SQL: {sql_query}")

        # this will query perfetto locally, use sql above directly if using web UI.
        query_result = tp.query(sql_query)

        # print in a human-readable format
        column_names = query_result.column_names
        print(" | ".join(column_names))
        print("-" * (sum(len(c) for c in column_names) + 3 * len(column_names)))
        for row in query_result:
            print(" | ".join(str(getattr(row, col)) for col in column_names))
    finally:
        # TraceProcessor holds a live trace-processor instance; release it
        # even when SQL generation or execution raises.
        tp.close()


if __name__ == "__main__":
    main()

# python3 main.py trace-file-name.pftrace "your prompt"
# python3 main.py trace-file-name.pftrace "your prompt" --local --model=gpt-oss:20b
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
perfetto
google-generativeai
python-dotenv
openai
ollama
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment