Skip to content

Commit 01c39a9

Browse files
juangallostraramonski
authored and committed
Fix worksheet count on dashboard when filtering by department (#696)
* Follow PEP8 style guidelines
* Add getDepartmentUIDs keyword index to the worksheet catalog. This is to make filtering by department work with the worksheets catalog.
* Update CHANGES.rst
1 parent 2362d31 commit 01c39a9

File tree

4 files changed

+42
-37
lines changed

4 files changed

+42
-37
lines changed

CHANGES.rst

+1
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,7 @@ Changelog
1515

1616
**Fixed**
1717

18+
- #696 Filter worksheets by department. The worksheet count in the dashboard is now properly updated according to the selected departments
1819

1920
**Security**
2021

bika/lims/browser/dashboard/dashboard.py

+36-36
Original file line numberDiff line numberDiff line change
@@ -38,8 +38,8 @@
3838
# Supported periodicities for evolution charts
3939
PERIODICITY_DAILY = "d"
4040
PERIODICITY_WEEKLY = "w"
41-
PERIODICITY_MONTHLY= "m"
42-
PERIODICITY_QUARTERLY= "q"
41+
PERIODICITY_MONTHLY = "m"
42+
PERIODICITY_QUARTERLY = "q"
4343
PERIODICITY_BIANNUAL = "b"
4444
PERIODICITY_YEARLY = "y"
4545
PERIODICITY_ALL = "a"
@@ -182,7 +182,7 @@ def __call__(self):
182182
return
183183

184184
self.member = mtool.getAuthenticatedMember()
185-
self.periodicity = self.request.get('p', PERIODICITY_WEEKLY)
185+
self.periodicity = self.request.get('p', PERIODICITY_WEEKLY)
186186
self.dashboard_cookie = self.check_dashboard_cookie()
187187
date_range = self.get_date_range(self.periodicity)
188188
self.date_from = date_range[0]
@@ -260,47 +260,47 @@ def get_date_range(self, periodicity=PERIODICITY_WEEKLY):
260260
:rtype: [(DateTime, DateTime)]
261261
"""
262262
today = datetime.date.today()
263-
if (periodicity == PERIODICITY_DAILY):
263+
if periodicity == PERIODICITY_DAILY:
264264
# Daily, load last 30 days
265265
date_from = DateTime() - 30
266266
date_to = DateTime() + 1
267-
return (date_from, date_to)
267+
return date_from, date_to
268268

269-
if (periodicity == PERIODICITY_MONTHLY):
269+
if periodicity == PERIODICITY_MONTHLY:
270270
# Monthly, load last 2 years
271271
min_year = today.year - 1 if today.month == 12 else today.year - 2
272272
min_month = 1 if today.month == 12 else today.month
273273
date_from = DateTime(min_year, min_month, 1)
274274
date_to = DateTime(today.year, today.month,
275275
monthrange(today.year, today.month)[1],
276276
23, 59, 59)
277-
return (date_from, date_to)
277+
return date_from, date_to
278278

279-
if (periodicity == PERIODICITY_QUARTERLY):
279+
if periodicity == PERIODICITY_QUARTERLY:
280280
# Quarterly, load last 4 years
281281
m = (((today.month - 1) / 3) * 3) + 1
282282
min_year = today.year - 4 if today.month == 12 else today.year - 5
283283
date_from = DateTime(min_year, m, 1)
284284
date_to = DateTime(today.year, m + 2,
285285
monthrange(today.year, m + 2)[1], 23, 59,
286286
59)
287-
return (date_from, date_to)
288-
if (periodicity == PERIODICITY_BIANNUAL):
287+
return date_from, date_to
288+
if periodicity == PERIODICITY_BIANNUAL:
289289
# Biannual, load last 10 years
290290
m = (((today.month - 1) / 6) * 6) + 1
291291
min_year = today.year - 10 if today.month == 12 else today.year - 11
292292
date_from = DateTime(min_year, m, 1)
293293
date_to = DateTime(today.year, m + 5,
294294
monthrange(today.year, m + 5)[1], 23, 59,
295295
59)
296-
return (date_from, date_to)
296+
return date_from, date_to
297297

298-
if (periodicity in [PERIODICITY_YEARLY, PERIODICITY_ALL]):
298+
if periodicity in [PERIODICITY_YEARLY, PERIODICITY_ALL]:
299299
# Yearly or All time, load last 15 years
300300
min_year = today.year - 15 if today.month == 12 else today.year - 16
301301
date_from = DateTime(min_year, 1, 1)
302302
date_to = DateTime(today.year, 12, 31, 23, 59, 59)
303-
return (date_from, date_to)
303+
return date_from, date_to
304304

305305
# Default Weekly, load last six months
306306
year, weeknum, dow = today.isocalendar()
@@ -309,7 +309,7 @@ def get_date_range(self, periodicity=PERIODICITY_WEEKLY):
309309
else (today.month - 6) + 12
310310
date_from = DateTime(min_year, min_month, 1)
311311
date_to = DateTime() - dow + 7
312-
return (date_from, date_to)
312+
return date_from, date_to
313313

314314
def get_sections(self):
315315
""" Returns an array with the sections to be displayed.
@@ -356,7 +356,7 @@ def _getStatistics(self, name, description, url, catalog, criterias, total):
356356
results = results if total >= results else total
357357
ratio = (float(results)/float(total))*100 if results > 0 else 0
358358
ratio = str("%%.%sf" % 1) % ratio
359-
out['legend'] = _('of') + " " + str(total) + ' (' + ratio +'%)'
359+
out['legend'] = _('of') + " " + str(total) + ' (' + ratio + '%)'
360360
out['number'] = results
361361
out['percentage'] = float(ratio)
362362
return out
@@ -373,7 +373,7 @@ def get_analysisrequests_section(self):
373373
filtering_allowed = self.context.bika_setup.getAllowDepartmentFiltering()
374374
if filtering_allowed:
375375
cookie_dep_uid = self.request.get(FILTER_BY_DEPT_COOKIE_ID, '').split(',') if filtering_allowed else ''
376-
query['getDepartmentUIDs'] = { "query": cookie_dep_uid,"operator":"or" }
376+
query['getDepartmentUIDs'] = {"query": cookie_dep_uid, "operator": "or"}
377377

378378
# Check if dashboard_cookie contains any values to query
379379
# elements by
@@ -383,70 +383,70 @@ def get_analysisrequests_section(self):
383383
total = self.search_count(query, catalog.id)
384384

385385
# Sampling workflow enabled?
386-
if (self.context.bika_setup.getSamplingWorkflowEnabled()):
386+
if self.context.bika_setup.getSamplingWorkflowEnabled():
387387
# Analysis Requests awaiting to be sampled or scheduled
388388
name = _('Analysis Requests to be sampled')
389389
desc = _("To be sampled")
390390
purl = 'samples?samples_review_state=to_be_sampled'
391391
query['review_state'] = ['to_be_sampled', ]
392-
query['cancellation_state'] = ['active',]
392+
query['cancellation_state'] = ['active', ]
393393
out.append(self._getStatistics(name, desc, purl, catalog, query, total))
394394

395395
# Analysis Requests awaiting to be preserved
396396
name = _('Analysis Requests to be preserved')
397397
desc = _("To be preserved")
398398
purl = 'samples?samples_review_state=to_be_preserved'
399399
query['review_state'] = ['to_be_preserved', ]
400-
query['cancellation_state'] = ['active',]
400+
query['cancellation_state'] = ['active', ]
401401
out.append(self._getStatistics(name, desc, purl, catalog, query, total))
402402

403403
# Analysis Requests scheduled for Sampling
404404
name = _('Analysis Requests scheduled for sampling')
405405
desc = _("Sampling scheduled")
406406
purl = 'samples?samples_review_state=scheduled_sampling'
407407
query['review_state'] = ['scheduled_sampling', ]
408-
query['cancellation_state'] = ['active',]
408+
query['cancellation_state'] = ['active', ]
409409
out.append(self._getStatistics(name, desc, purl, catalog, query, total))
410410

411411
# Analysis Requests awaiting for reception
412412
name = _('Analysis Requests to be received')
413413
desc = _("Reception pending")
414414
purl = 'analysisrequests?analysisrequests_review_state=sample_due'
415415
query['review_state'] = ['sample_due', ]
416-
query['cancellation_state'] = ['active',]
416+
query['cancellation_state'] = ['active', ]
417417
out.append(self._getStatistics(name, desc, purl, catalog, query, total))
418418

419419
# Analysis Requests under way
420420
name = _('Analysis Requests with results pending')
421421
desc = _("Results pending")
422422
purl = 'analysisrequests?analysisrequests_review_state=sample_received'
423423
query['review_state'] = ['attachment_due',
424-
'sample_received',]
425-
query['cancellation_state'] = ['active',]
424+
'sample_received', ]
425+
query['cancellation_state'] = ['active', ]
426426
out.append(self._getStatistics(name, desc, purl, catalog, query, total))
427427

428428
# Analysis Requests to be verified
429429
name = _('Analysis Requests to be verified')
430430
desc = _("To be verified")
431431
purl = 'analysisrequests?analysisrequests_review_state=to_be_verified'
432432
query['review_state'] = ['to_be_verified', ]
433-
query['cancellation_state'] = ['active',]
433+
query['cancellation_state'] = ['active', ]
434434
out.append(self._getStatistics(name, desc, purl, catalog, query, total))
435435

436436
# Analysis Requests verified (to be published)
437437
name = _('Analysis Requests verified')
438438
desc = _("Verified")
439439
purl = 'analysisrequests?analysisrequests_review_state=verified'
440440
query['review_state'] = ['verified', ]
441-
query['cancellation_state'] = ['active',]
441+
query['cancellation_state'] = ['active', ]
442442
out.append(self._getStatistics(name, desc, purl, catalog, query, total))
443443

444444
# Analysis Requests published
445445
name = _('Analysis Requests published')
446446
desc = _("Published")
447447
purl = 'analysisrequests?analysisrequests_review_state=published'
448448
query['review_state'] = ['published', ]
449-
query['cancellation_state'] = ['active',]
449+
query['cancellation_state'] = ['active', ]
450450
out.append(self._getStatistics(name, desc, purl, catalog, query, total))
451451

452452
# Analysis Requests to be printed
@@ -456,7 +456,7 @@ def get_analysisrequests_section(self):
456456
purl = 'analysisrequests?analysisrequests_getPrinted=0'
457457
query['getPrinted'] = '0'
458458
query['review_state'] = ['published', ]
459-
query['cancellation_state'] = ['active',]
459+
query['cancellation_state'] = ['active', ]
460460
out.append(
461461
self._getStatistics(name, desc, purl, catalog, query, total))
462462

@@ -481,11 +481,11 @@ def get_worksheets_section(self):
481481
"""
482482
out = []
483483
bc = getToolByName(self.context, CATALOG_WORKSHEET_LISTING)
484-
query = {'portal_type':"Worksheet",}
484+
query = {'portal_type': "Worksheet", }
485485
filtering_allowed = self.context.bika_setup.getAllowDepartmentFiltering()
486486
if filtering_allowed:
487487
cookie_dep_uid = self.request.get(FILTER_BY_DEPT_COOKIE_ID, '').split(',') if filtering_allowed else ''
488-
query['getDepartmentUIDs'] = { "query": cookie_dep_uid,"operator":"or" }
488+
query['getDepartmentUIDs'] = {"query": cookie_dep_uid, "operator": "or"}
489489

490490
# Check if dashboard_cookie contains any values to query
491491
# elements by
@@ -503,14 +503,14 @@ def get_worksheets_section(self):
503503

504504
# Worksheets to be verified
505505
name = _('To be verified')
506-
desc =_('To be verified')
506+
desc = _('To be verified')
507507
purl = 'worksheets?list_review_state=to_be_verified'
508508
query['review_state'] = ['to_be_verified', ]
509509
out.append(self._getStatistics(name, desc, purl, bc, query, total))
510510

511511
# Worksheets verified
512512
name = _('Verified')
513-
desc =_('Verified')
513+
desc = _('Verified')
514514
purl = 'worksheets?list_review_state=verified'
515515
query['review_state'] = ['verified', ]
516516
out.append(self._getStatistics(name, desc, purl, bc, query, total))
@@ -793,7 +793,7 @@ def fill_dates_evo(self, catalog, query):
793793
def _fill_dates_evo_cachekey(method, self, query_json, catalog_name,
794794
periodicity):
795795
hour = time() // (60 * 60 * 2)
796-
return (hour, catalog_name, query_json, periodicity)
796+
return hour, catalog_name, query_json, periodicity
797797

798798
@ram.cache(_fill_dates_evo_cachekey)
799799
def _fill_dates_evo(self, query_json, catalog_name, periodicity):
@@ -834,14 +834,14 @@ def _fill_dates_evo(self, query_json, catalog_name, periodicity):
834834
stats = statesmap.values()
835835
stats.sort()
836836
stats.append(otherstate)
837-
statscount = {s:0 for s in stats}
837+
statscount = {s: 0 for s in stats}
838838
# Add first all periods, cause we want all segments to be displayed
839839
curr = date_from.asdatetime()
840840
end = date_to.asdatetime()
841841
while curr < end:
842842
currstr = self._getDateStr(periodicity, DateTime(curr))
843843
if currstr not in outevoidx:
844-
outdict = {'date':currstr}
844+
outdict = {'date': currstr}
845845
for k in stats:
846846
outdict[k] = 0
847847
outevo.append(outdict)
@@ -866,11 +866,11 @@ def _fill_dates_evo(self, query_json, catalog_name, periodicity):
866866
else:
867867
# Create new row
868868
currow = {'date': created,
869-
state: 1 }
869+
state: 1}
870870
outevo.append(currow)
871871

872872
# Remove all those states for which there is no data
873-
rstates = [k for k,v in statscount.items() if v==0]
873+
rstates = [k for k, v in statscount.items() if v == 0]
874874
for o in outevo:
875875
for r in rstates:
876876
if r in o:

bika/lims/catalog/worksheet_catalog.py

+1
Original file line numberDiff line numberDiff line change
@@ -23,6 +23,7 @@
2323
'getAnalyst': 'FieldIndex',
2424
'getWorksheetTemplateTitle': 'FieldIndex',
2525
'getAnalysesUIDs': 'KeywordIndex',
26+
'getDepartmentUIDs': 'KeywordIndex',
2627
}
2728
# Defining the columns for this catalog
2829
_columns_list = [

bika/lims/upgrade/v01_02_004.py

+4-1
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,7 @@
88
from bika.lims.config import PROJECTNAME as product
99
from bika.lims.upgrade import upgradestep
1010
from bika.lims.upgrade.utils import UpgradeUtils
11+
from bika.lims.catalog.worksheet_catalog import CATALOG_WORKSHEET_LISTING
1112

1213
version = '1.2.4' # Remember version number in metadata.xml and setup.py
1314
profile = 'profile-{0}:default'.format(product)
@@ -26,7 +27,9 @@ def upgrade(tool):
2627

2728
logger.info("Upgrading {0}: {1} -> {2}".format(product, ver_from, version))
2829

29-
# -------- ADD YOUR STUFF HERE --------
30+
# Required to make filtering by department in worksheets work
31+
ut.addIndex(CATALOG_WORKSHEET_LISTING, 'getDepartmentUIDs', 'KeywordIndex')
32+
ut.refreshCatalogs()
3033

3134
logger.info("{0} upgraded to version {1}".format(product, version))
3235

0 commit comments

Comments
 (0)