 logger = logging.getLogger(__name__)
 
 
-def execute_trace_query_chart(
-    *,
-    org_id: int,
-    query: str,
-    stats_period: str,
-    y_axes: list[str],
-    group_by: list[str] | None = None,
-    project_ids: list[int] | None = None,
-) -> dict[str, Any] | None:
-    """
-    Execute a trace query to get chart/timeseries data by calling the events-stats endpoint.
-    """
-    try:
-        organization = Organization.objects.get(id=org_id)
-    except Organization.DoesNotExist:
-        logger.warning("Organization not found", extra={"org_id": org_id})
-        return None
-
-    # Use provided project_ids or get all project IDs for the organization
-    if project_ids is None:
-        project_ids = list(organization.project_set.values_list("id", flat=True))
-        if not project_ids:
-            logger.warning("No projects found for organization", extra={"org_id": org_id})
-            return None
-
-    params: dict[str, Any] = {
-        "query": query,
-        "statsPeriod": stats_period,
-        "yAxis": y_axes,
-        "project": project_ids,
-        "dataset": "spans",
-        "referrer": Referrer.SEER_RPC,
-        "transformAliasToInputFormat": "1",  # Required for RPC datasets
-    }
-
-    # Add group_by if provided (for top events)
-    if group_by and len(group_by) > 0:
-        params["topEvents"] = 5
-        params["field"] = group_by
-        params["excludeOther"] = "0"  # Include "Other" series
-
-    resp = client.get(
-        auth=ApiKey(organization_id=organization.id, scope_list=["org:read", "project:read"]),
-        user=None,
-        path=f"/organizations/{organization.slug}/events-stats/",
-        params=params,
-    )
-    data = resp.data
-
-    # Always normalize to the nested {"metric": {"data": [...]}} format for consistency
-    metric_is_single = len(y_axes) == 1
-    metric_name = y_axes[0] if metric_is_single else None
-    if metric_name and metric_is_single:
-        # Handle grouped data with single metric: wrap each group's data in the metric name
-        if group_by:
-            return {
-                group_value: (
-                    {metric_name: group_data}
-                    if isinstance(group_data, dict) and "data" in group_data
-                    else group_data
-                )
-                for group_value, group_data in data.items()
-            }
-
-        # Handle non-grouped data with single metric: wrap data in the metric name
-        if isinstance(data, dict) and "data" in data:
-            return {metric_name: data}
-
-    return data
-
-
-def execute_trace_query_table(
-    *,
-    org_id: int,
-    query: str,
-    stats_period: str,
-    sort: str,
-    group_by: list[str] | None = None,
-    y_axes: list[str] | None = None,
-    per_page: int = 50,
-    mode: Literal["spans", "aggregates"] = "spans",
-    project_ids: list[int] | None = None,
-) -> dict[str, Any] | None:
-    """
-    Execute a trace query to get table data by calling the events endpoint.
-    """
-    try:
-        organization = Organization.objects.get(id=org_id)
-    except Organization.DoesNotExist:
-        logger.warning("Organization not found", extra={"org_id": org_id})
-        return None
-
-    # Use provided project_ids or get all project IDs for the organization
-    if project_ids is None:
-        project_ids = list(organization.project_set.values_list("id", flat=True))
-        if not project_ids:
-            logger.warning("No projects found for organization", extra={"org_id": org_id})
-            return None
-
-    # Determine fields based on mode
-    if mode == "aggregates":
-        # Aggregates mode: group_by fields + aggregate functions
-        fields = []
-        if group_by:
-            fields.extend(group_by)
-        if y_axes:
-            fields.extend(y_axes)
-    else:
-        # Samples mode: default span fields
-        fields = [
-            "id",
-            "span.op",
-            "span.description",
-            "span.duration",
-            "transaction",
-            "timestamp",
-            "project",
-            "trace",
-        ]
-
-    params: dict[str, Any] = {
-        "query": query,
-        "statsPeriod": stats_period,
-        "field": fields,
-        "sort": sort if sort else ("-timestamp" if not group_by else None),
-        "per_page": per_page,
-        "project": project_ids,
-        "dataset": "spans",
-        "referrer": Referrer.SEER_RPC,
-        "transformAliasToInputFormat": "1",  # Required for RPC datasets
-    }
-
-    # Remove None values
-    params = {k: v for k, v in params.items() if v is not None}
-
-    resp = client.get(
-        auth=ApiKey(organization_id=organization.id, scope_list=["org:read", "project:read"]),
-        user=None,
-        path=f"/organizations/{organization.slug}/events/",
-        params=params,
-    )
-    return resp.data
-
-
 def execute_table_query(
     *,
     org_id: int,
     dataset: str,
     fields: list[str],
-    query: str,
-    sort: str,
     per_page: int,
     stats_period: str,
+    query: str | None = None,
+    sort: str | None = None,
     project_ids: list[int] | None = None,
     project_slugs: list[str] | None = None,
     sampling_mode: SAMPLING_MODES = "NORMAL",
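
For context on the deleted chart helper: a single-y-axis response from events-stats was always wrapped under its metric name, so callers could rely on the nested `{"metric": {"data": [...]}}` shape described in its comment. A minimal sketch of that reshaping for the ungrouped case, where `raw` is a made-up stand-in for the endpoint payload:

```python
# Illustrative only: `raw` mimics an events-stats payload for a single y-axis.
raw = {"data": [[1700000000, [{"count": 12}]], [1700000060, [{"count": 7}]]]}

y_axes = ["count()"]
if len(y_axes) == 1 and isinstance(raw, dict) and "data" in raw:
    normalized = {y_axes[0]: raw}  # -> {"count()": {"data": [...]}}
else:
    normalized = raw  # multi-axis payloads were passed through unchanged
```
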
@@ -209,10 +65,16 @@ def execute_table_query(
         project_ids = [ALL_ACCESS_PROJECT_ID]
     # Note if both project_ids and project_slugs are provided, the API request will 400.
 
+    if sort:
+        # Make sure the sort field is also among the requested fields, to avoid snuba errors.
+        sort_field = sort.lstrip("-")
+        if sort_field not in fields:
+            fields.append(sort_field)
+
     params: dict[str, Any] = {
         "dataset": dataset,
         "field": fields,
-        "query": query,
+        "query": query or None,
         "sort": sort if sort else ("-timestamp" if "timestamp" in fields else None),
         "per_page": per_page,
         "statsPeriod": stats_period,
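
The new `if sort:` guard keeps the sorted column among the selected fields, since sorting on a column that was not requested is what triggers the snuba errors the comment refers to. A small, self-contained illustration with made-up inputs:

```python
fields = ["id", "span.op", "span.duration"]
sort = "-timestamp"

if sort:
    sort_field = sort.lstrip("-")  # strip the descending-order prefix
    if sort_field not in fields:
        fields.append(sort_field)

assert fields == ["id", "span.op", "span.duration", "timestamp"]
```
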
@@ -232,7 +94,7 @@ def execute_table_query(
         path=f"/organizations/{organization.slug}/events/",
         params=params,
     )
-    return resp.data
+    return {"data": resp.data["data"]}
 
 
 def execute_timeseries_query(
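
With these changes, `query` and `sort` become optional and the helper returns only the result rows rather than the full events payload. A hypothetical call (argument values here are illustrative, not taken from the PR):

```python
result = execute_table_query(
    org_id=1,
    dataset="spans",
    fields=["span.op", "count()"],
    per_page=10,
    stats_period="24h",
    sort="-count()",
)
if result is not None:
    for row in result["data"]:
        print(row)
```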