 # Bulk actions
 #####################################################
 
+
 def get_epics_from_bulk(bulk_data, **additional_fields):
     """Convert `bulk_data` into a list of epics.
 
@@ -40,8 +41,10 @@ def get_epics_from_bulk(bulk_data, **additional_fields):
 
     :return: List of `Epic` instances.
     """
-    return [models.Epic(subject=line, **additional_fields)
-            for line in text.split_in_lines(bulk_data)]
+    return [
+        models.Epic(subject=line, **additional_fields)
+        for line in text.split_in_lines(bulk_data)
+    ]
 
 
 def create_epics_in_bulk(bulk_data, callback=None, precall=None, **additional_fields):
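
As a usage sketch of the helper reformatted above (a hypothetical call, not part of this commit; it assumes `text.split_in_lines` yields one subject per line and that the returned `Epic` instances are not yet saved):

    # Hypothetical example: one epic per line of the bulk payload, with any
    # extra model fields passed straight through to the Epic constructor.
    epics = get_epics_from_bulk(
        "Payments epic\nOnboarding epic",
        project=some_project,  # assumed Project instance in scope
        owner=some_user,       # assumed User instance in scope
    )
    # -> two unsaved Epic objects with subjects "Payments epic" and "Onboarding epic"
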
@@ -79,9 +82,9 @@ def update_epics_order_in_bulk(bulk_data: list, field: str, project: object):
     apply_order_updates(epic_orders, new_epic_orders)
 
     epic_ids = epic_orders.keys()
-    events.emit_event_for_ids(ids=epic_ids,
-                              content_type="epics.epic",
-                              projectid=project.pk)
+    events.emit_event_for_ids(
+        ids=epic_ids, content_type="epics.epic", projectid=project.pk
+    )
 
     db.update_attr_in_bulk_for_ids(epic_orders, field, models.Epic)
     return epic_orders
@@ -111,10 +114,7 @@ def create_related_userstories_in_bulk(bulk_data, epic, **additional_fields):
     related_userstories = []
     for userstory in userstories:
         related_userstories.append(
-            models.RelatedUserStory(
-                user_story=userstory,
-                epic=epic
-            )
+            models.RelatedUserStory(user_story=userstory, epic=epic)
         )
     db.save_in_bulk(related_userstories)
     project.update_role_points(user_stories=userstories)
@@ -137,16 +137,21 @@ def update_epic_related_userstories_order_in_bulk(bulk_data: list, epic: object)
     rus_orders = {rus.id: rus.order for rus in related_user_stories}
 
     rus_conversion = {rus.user_story_id: rus.id for rus in related_user_stories}
-    new_rus_orders = {rus_conversion[e["us_id"]]: e["order"] for e in bulk_data
-                      if e["us_id"] in rus_conversion}
+    new_rus_orders = {
+        rus_conversion[e["us_id"]]: e["order"]
+        for e in bulk_data
+        if e["us_id"] in rus_conversion
+    }
 
     apply_order_updates(rus_orders, new_rus_orders)
 
     if rus_orders:
         related_user_story_ids = rus_orders.keys()
-        events.emit_event_for_ids(ids=related_user_story_ids,
-                                  content_type="epics.relateduserstory",
-                                  projectid=epic.project_id)
+        events.emit_event_for_ids(
+            ids=related_user_story_ids,
+            content_type="epics.relateduserstory",
+            projectid=epic.project_id,
+        )
 
         db.update_attr_in_bulk_for_ids(rus_orders, "order", models.RelatedUserStory)
 
@@ -157,25 +162,39 @@ def update_epic_related_userstories_order_in_bulk(bulk_data: list, epic: object)
 # CSV
 #####################################################
 
+
 def epics_to_csv(project, queryset):
     csv_data = io.StringIO()
-    fieldnames = ["id", "ref", "subject", "description", "owner", "owner_full_name",
-                  "assigned_to", "assigned_to_full_name", "status", "epics_order",
-                  "client_requirement", "team_requirement", "attachments", "tags",
-                  "watchers", "voters", "created_date", "modified_date",
-                  "related_user_stories"]
+    fieldnames = [
+        "id",
+        "ref",
+        "subject",
+        "description",
+        "owner",
+        "owner_full_name",
+        "assigned_to",
+        "assigned_to_full_name",
+        "status",
+        "epics_order",
+        "client_requirement",
+        "team_requirement",
+        "attachments",
+        "tags",
+        "watchers",
+        "voters",
+        "created_date",
+        "modified_date",
+        "related_user_stories",
+    ]
 
     custom_attrs = project.epiccustomattributes.all()
     for custom_attr in custom_attrs:
         fieldnames.append(custom_attr.name)
 
-    queryset = queryset.prefetch_related("attachments",
-                                         "custom_attributes_values",
-                                         "user_stories__project")
-    queryset = queryset.select_related("owner",
-                                       "assigned_to",
-                                       "status",
-                                       "project")
+    queryset = queryset.prefetch_related(
+        "attachments", "custom_attributes_values", "user_stories__project"
+    )
+    queryset = queryset.select_related("owner", "assigned_to", "status", "project")
 
     queryset = attach_total_voters_to_queryset(queryset)
     queryset = attach_watchers_to_queryset(queryset)
@@ -186,12 +205,20 @@ def epics_to_csv(project, queryset):
         epic_data = {
             "id": epic.id,
             "ref": epic.ref,
-            "subject": epic.subject,
-            "description": epic.description,
+            "subject": text.sanitize_csv_text_value(epic.subject),
+            "description": text.sanitize_csv_text_value(epic.description),
             "owner": epic.owner.username if epic.owner else None,
-            "owner_full_name": epic.owner.get_full_name() if epic.owner else None,
+            "owner_full_name": (
+                text.sanitize_csv_text_value(epic.owner.get_full_name())
+                if epic.owner
+                else None
+            ),
             "assigned_to": epic.assigned_to.username if epic.assigned_to else None,
-            "assigned_to_full_name": epic.assigned_to.get_full_name() if epic.assigned_to else None,
+            "assigned_to_full_name": (
+                text.sanitize_csv_text_value(epic.assigned_to.get_full_name())
+                if epic.assigned_to
+                else None
+            ),
             "status": epic.status.name if epic.status else None,
             "epics_order": epic.epics_order,
             "client_requirement": epic.client_requirement,
@@ -202,16 +229,21 @@ def epics_to_csv(project, queryset):
             "voters": epic.total_voters,
             "created_date": epic.created_date,
             "modified_date": epic.modified_date,
-            "related_user_stories": ",".join([
-                "{}#{}".format(us.project.slug, us.ref) for us in epic.user_stories.all()
-            ]),
+            "related_user_stories": ",".join(
+                [
+                    "{}#{}".format(us.project.slug, us.ref)
+                    for us in epic.user_stories.all()
+                ]
+            ),
         }
 
         for custom_attr in custom_attrs:
             if not hasattr(epic, "custom_attributes_values"):
                 continue
-            value = epic.custom_attributes_values.attributes_values.get(str(custom_attr.id), None)
-            epic_data[custom_attr.name] = value
+            value = epic.custom_attributes_values.attributes_values.get(
+                str(custom_attr.id), None
+            )
+            epic_data[custom_attr.name] = text.sanitize_csv_text_value(value)
 
         writer.writerow(epic_data)
 
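
The `sanitize_csv_text_value` calls added in the two hunks above wrap every free-text field that reaches the export (subject, description, owner and assignee full names, custom attribute values). A common motivation for this kind of sanitization is CSV formula injection, where a cell beginning with `=`, `+`, `-` or `@` is evaluated by spreadsheet software. The sketch below only illustrates that idea; it is an assumption about the helper's intent, not Taiga's actual implementation:

    # Illustrative sketch only; the real text.sanitize_csv_text_value may differ.
    def sanitize_csv_text_value_sketch(value):
        if value and str(value)[0] in ("=", "+", "-", "@"):
            # Prefix the cell so spreadsheet software treats it as plain text,
            # not as an executable formula.
            return "'" + str(value)
        return value
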
@@ -222,8 +254,11 @@ def epics_to_csv(project, queryset):
 # Api filter data
 #####################################################
 
+
 def _get_epics_statuses(project, queryset):
-    compiler = connection.ops.compiler(queryset.query.compiler)(queryset.query, connection, None)
+    compiler = connection.ops.compiler(queryset.query.compiler)(
+        queryset.query, connection, None
+    )
     queryset_where_tuple = queryset.query.where.as_sql(compiler, connection)
     where = queryset_where_tuple[0]
     where_params = queryset_where_tuple[1]
@@ -241,26 +276,32 @@ def _get_epics_statuses(project, queryset):
         FROM "projects_epicstatus"
        WHERE "projects_epicstatus"."project_id" = %s
     ORDER BY "projects_epicstatus"."order";
-    """.format(where=where)
+    """.format(
+        where=where
+    )
 
     with closing(connection.cursor()) as cursor:
         cursor.execute(extra_sql, where_params + [project.id])
         rows = cursor.fetchall()
 
     result = []
     for id, name, color, order, count in rows:
-        result.append({
-            "id": id,
-            "name": _(name),
-            "color": color,
-            "order": order,
-            "count": count,
-        })
+        result.append(
+            {
+                "id": id,
+                "name": _(name),
+                "color": color,
+                "order": order,
+                "count": count,
+            }
+        )
     return sorted(result, key=itemgetter("order"))
 
 
 def _get_epics_assigned_to(project, queryset):
-    compiler = connection.ops.compiler(queryset.query.compiler)(queryset.query, connection, None)
+    compiler = connection.ops.compiler(queryset.query.compiler)(
+        queryset.query, connection, None
+    )
     queryset_where_tuple = queryset.query.where.as_sql(compiler, connection)
     where = queryset_where_tuple[0]
     where_params = queryset_where_tuple[1]
@@ -292,7 +333,9 @@ def _get_epics_assigned_to(project, queryset):
     INNER JOIN "projects_project" ON ("epics_epic"."project_id" = "projects_project"."id")
          WHERE {where} AND "epics_epic"."assigned_to_id" IS NULL
       GROUP BY assigned_to_id
-    """.format(where=where)
+    """.format(
+        where=where
+    )
 
     with closing(connection.cursor()) as cursor:
         cursor.execute(extra_sql, where_params + [project.id] + where_params)
@@ -301,28 +344,34 @@ def _get_epics_assigned_to(project, queryset):
     result = []
     none_valued_added = False
     for id, full_name, username, count in rows:
-        result.append({
-            "id": id,
-            "full_name": full_name or username or "",
-            "count": count,
-        })
+        result.append(
+            {
+                "id": id,
+                "full_name": full_name or username or "",
+                "count": count,
+            }
+        )
 
         if id is None:
             none_valued_added = True
 
     # If there was no epic with null assigned_to we manually add it
     if not none_valued_added:
-        result.append({
-            "id": None,
-            "full_name": "",
-            "count": 0,
-        })
+        result.append(
+            {
+                "id": None,
+                "full_name": "",
+                "count": 0,
+            }
+        )
 
     return sorted(result, key=itemgetter("full_name"))
 
 
 def _get_epics_owners(project, queryset):
-    compiler = connection.ops.compiler(queryset.query.compiler)(queryset.query, connection, None)
+    compiler = connection.ops.compiler(queryset.query.compiler)(
+        queryset.query, connection, None
+    )
     queryset_where_tuple = queryset.query.where.as_sql(compiler, connection)
     where = queryset_where_tuple[0]
     where_params = queryset_where_tuple[1]
@@ -357,7 +406,9 @@ def _get_epics_owners(project, queryset):
                FROM users_user
     LEFT OUTER JOIN counters ON ("users_user"."id" = "counters"."owner_id")
               WHERE ("users_user"."is_system" IS TRUE)
-    """.format(where=where)
+    """.format(
+        where=where
+    )
 
     with closing(connection.cursor()) as cursor:
         cursor.execute(extra_sql, where_params + [project.id])
@@ -366,16 +417,20 @@ def _get_epics_owners(project, queryset):
     result = []
     for id, full_name, username, count in rows:
         if count > 0:
-            result.append({
-                "id": id,
-                "full_name": full_name or username or "",
-                "count": count,
-            })
+            result.append(
+                {
+                    "id": id,
+                    "full_name": full_name or username or "",
+                    "count": count,
+                }
+            )
     return sorted(result, key=itemgetter("full_name"))
 
 
 def _get_epics_tags(project, queryset):
-    compiler = connection.ops.compiler(queryset.query.compiler)(queryset.query, connection, None)
+    compiler = connection.ops.compiler(queryset.query.compiler)(
+        queryset.query, connection, None
+    )
     queryset_where_tuple = queryset.query.where.as_sql(compiler, connection)
     where = queryset_where_tuple[0]
     where_params = queryset_where_tuple[1]
@@ -401,19 +456,23 @@ def _get_epics_tags(project, queryset):
          FROM project_tags
     LEFT JOIN epics_tags ON project_tags.tag_color[1] = epics_tags.tag
      ORDER BY tag
-    """.format(where=where)
+    """.format(
+        where=where
+    )
 
     with closing(connection.cursor()) as cursor:
         cursor.execute(extra_sql, where_params + [project.id])
         rows = cursor.fetchall()
 
     result = []
     for name, color, count in rows:
-        result.append({
-            "name": name,
-            "color": color,
-            "count": count,
-        })
+        result.append(
+            {
+                "name": name,
+                "color": color,
+                "count": count,
+            }
+        )
     return sorted(result, key=itemgetter("name"))
 
 
@@ -422,11 +481,13 @@ def get_epics_filters_data(project, querysets):
     Given a project and an epics queryset, return a simple data structure
     of all possible filters for the epics in the queryset.
     """
-    data = OrderedDict([
-        ("statuses", _get_epics_statuses(project, querysets["statuses"])),
-        ("assigned_to", _get_epics_assigned_to(project, querysets["assigned_to"])),
-        ("owners", _get_epics_owners(project, querysets["owners"])),
-        ("tags", _get_epics_tags(project, querysets["tags"])),
-    ])
+    data = OrderedDict(
+        [
+            ("statuses", _get_epics_statuses(project, querysets["statuses"])),
+            ("assigned_to", _get_epics_assigned_to(project, querysets["assigned_to"])),
+            ("owners", _get_epics_owners(project, querysets["owners"])),
+            ("tags", _get_epics_tags(project, querysets["tags"])),
+        ]
+    )
 
     return data
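
For reference, based on the builder functions above, `get_epics_filters_data` returns an `OrderedDict` shaped roughly like the following (all values are illustrative, not real data):

    # Illustrative result shape (values made up for the example):
    {
        "statuses": [{"id": 1, "name": "New", "color": "#70728F", "order": 1, "count": 3}],
        "assigned_to": [{"id": None, "full_name": "", "count": 0}],
        "owners": [{"id": 7, "full_name": "Jane Doe", "count": 2}],
        "tags": [{"name": "backend", "color": None, "count": 5}],
    }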