diff --git a/CHANGES.rst b/CHANGES.rst
index 1b2a4467c6..9ae1bc4d6e 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -7,6 +7,7 @@ Changelog
**Added**
+- #1505 Display partition link in analyses listing
- #1491 Enable Audit-logging for Dexterity Contents
- #1489 Support Multiple Catalogs for Dexterity Contents
- #1481 Filter Templates field when Sample Type is selected in Sample Add form
@@ -30,6 +31,7 @@ Changelog
**Fixed**
+- #1505 Manage Analyses Form re-applies partitioned Analyses back to the Root
- #1503 Avoid duplicate CSS IDs in multi-column Add form
- #1501 Fix Attribute Error in Reference Sample Popup
- #1493 jsonapi.read omits `include_methods` when a single parameter is used
diff --git a/bika/lims/browser/analyses/view.py b/bika/lims/browser/analyses/view.py
index f2ca83cb1d..5a81de8420 100644
--- a/bika/lims/browser/analyses/view.py
+++ b/bika/lims/browser/analyses/view.py
@@ -563,6 +563,8 @@ def folderitem(self, obj, item, index):
self._folder_item_detection_limits(obj, item)
# Fill Specifications
self._folder_item_specifications(obj, item)
+ # Fill Partition
+ self._folder_item_partition(obj, item)
# Fill Due Date and icon if late/overdue
self._folder_item_duedate(obj, item)
# Fill verification criteria
@@ -1172,6 +1174,20 @@ def _folder_item_accredited_icon(self, analysis_brain, item):
img = get_image("accredited.png", title=t(_("Accredited")))
self._append_html_element(item, "Service", img)
+ def _folder_item_partition(self, analysis_brain, item):
+ """Adds an anchor to the partition if the current analysis is from a
+ partition that does not match with the current context
+ """
+ if not IAnalysisRequest.providedBy(self.context):
+ return
+
+ sample_id = analysis_brain.getRequestID
+ if sample_id != api.get_id(self.context):
+ part_url = analysis_brain.getRequestURL
+ url = get_link(part_url, value=sample_id, **{"class": "small"})
+ title = item["replace"].get("Service") or item["Service"]
+            item["replace"]["Service"] = "{}<br/>{}".format(title, url)
+
def _folder_item_report_visibility(self, analysis_brain, item):
"""Set if the hidden field can be edited (enabled/disabled)
diff --git a/bika/lims/browser/analysisrequest/manage_analyses.py b/bika/lims/browser/analysisrequest/manage_analyses.py
index afa74147ac..1db380950c 100644
--- a/bika/lims/browser/analysisrequest/manage_analyses.py
+++ b/bika/lims/browser/analysisrequest/manage_analyses.py
@@ -60,7 +60,6 @@ def __init__(self, context, request):
self.show_select_all_checkbox = False
self.pagesize = 999999
self.show_search = True
- self.fetch_transitions_on_select = False
self.categories = []
self.selected = []
diff --git a/bika/lims/browser/fields/aranalysesfield.py b/bika/lims/browser/fields/aranalysesfield.py
index df37ed8623..f3fbfd6082 100644
--- a/bika/lims/browser/fields/aranalysesfield.py
+++ b/bika/lims/browser/fields/aranalysesfield.py
@@ -22,21 +22,21 @@
from AccessControl import ClassSecurityInfo
from AccessControl import Unauthorized
+from Products.Archetypes.Registry import registerField
+from Products.Archetypes.public import Field
+from Products.Archetypes.public import ObjectField
+from zope.interface import implements
+
from bika.lims import api
from bika.lims import logger
from bika.lims.api.security import check_permission
from bika.lims.catalog import CATALOG_ANALYSIS_LISTING
-from bika.lims.interfaces import IAnalysis, ISubmitted
-from bika.lims.interfaces import IAnalysisService
from bika.lims.interfaces import IARAnalysesField
+from bika.lims.interfaces import IAnalysis
+from bika.lims.interfaces import IAnalysisService
+from bika.lims.interfaces import ISubmitted
from bika.lims.permissions import AddAnalysis
from bika.lims.utils.analysis import create_analysis
-from Products.Archetypes.public import Field
-from Products.Archetypes.public import ObjectField
-from Products.Archetypes.Registry import registerField
-from Products.Archetypes.utils import shasattr
-from Products.CMFCore.utils import getToolByName
-from zope.interface import implements
"""Field to manage Analyses on ARs
@@ -72,16 +72,21 @@ def get(self, instance, **kwargs):
:param kwargs: Keyword arguments to inject in the search query
:returns: A list of Analysis Objects/Catalog Brains
"""
- catalog = getToolByName(instance, CATALOG_ANALYSIS_LISTING)
- query = dict(
- [(k, v) for k, v in kwargs.items() if k in catalog.indexes()])
- query["portal_type"] = "Analysis"
- query["getRequestUID"] = api.get_uid(instance)
- analyses = catalog(query)
- if not kwargs.get("full_objects", False):
- return analyses
+ # Do we need to return objects or brains
+ full_objects = kwargs.get("full_objects", False)
+
+ # Bail out parameters from kwargs that don't match with indexes
+ catalog = api.get_tool(CATALOG_ANALYSIS_LISTING)
+ indexes = catalog.indexes()
+ query = dict([(k, v) for k, v in kwargs.items() if k in indexes])
- return map(api.get_object, analyses)
+ # Do the search against the catalog
+ query["portal_type"] = "Analysis"
+ query["getAncestorsUIDs"] = api.get_uid(instance)
+ brains = catalog(query)
+ if full_objects:
+ return map(api.get_object, brains)
+ return brains
security.declarePrivate('set')
@@ -99,22 +104,8 @@ def set(self, instance, items, prices=None, specs=None, hidden=None, **kw):
:type hidden: list
:returns: list of new assigned Analyses
"""
- # This setter returns a list of new set Analyses
- new_analyses = []
-
- # Current assigned analyses
- analyses = instance.objectValues("Analysis")
-
- # Submitted analyses must be retained
- submitted = filter(lambda an: ISubmitted.providedBy(an), analyses)
-
- # Prevent removing all analyses
- #
- # N.B.: Submitted analyses are rendered disabled in the HTML form.
- # Therefore, their UIDs are not included in the submitted UIDs.
- if not items and not submitted:
- logger.warn("Not allowed to remove all Analyses from AR.")
- return new_analyses
+ if items is None:
+ items = []
# Bail out if the items is not a list type
if not isinstance(items, (list, tuple)):
@@ -156,33 +147,22 @@ def set(self, instance, items, prices=None, specs=None, hidden=None, **kw):
if prices is None:
prices = dict()
- # CREATE/MODIFY ANALYSES
+ # Add analyses
+ new_analyses = map(lambda service:
+ self.add_analysis(instance, service, prices, hidden),
+ services)
+ new_analyses = filter(None, new_analyses)
- for service in services:
- service_uid = api.get_uid(service)
- keyword = service.getKeyword()
-
- # Create the Analysis if it doesn't exist
- if shasattr(instance, keyword):
- analysis = instance._getOb(keyword)
- else:
- analysis = create_analysis(instance, service)
- new_analyses.append(analysis)
-
- # set the hidden status
- analysis.setHidden(hidden.get(service_uid, False))
-
- # Set the price of the Analysis
- analysis.setPrice(prices.get(service_uid, service.getPrice()))
-
- # DELETE ANALYSES
+ # Remove analyses
+ # Since Manage Analyses view displays the analyses from partitions, we
+ # also need to take them into consideration here. Analyses from
+ # ancestors can be omitted.
+ analyses = instance.objectValues("Analysis")
+ analyses.extend(self.get_analyses_from_descendants(instance))
# Service UIDs
service_uids = map(api.get_uid, services)
- # Analyses IDs to delete
- delete_ids = []
-
# Assigned Attachments
assigned_attachments = []
@@ -194,7 +174,7 @@ def set(self, instance, items, prices=None, specs=None, hidden=None, **kw):
continue
# Skip non-open Analyses
- if analysis in submitted:
+ if ISubmitted.providedBy(analysis):
continue
# Remember assigned attachments
@@ -207,11 +187,9 @@ def set(self, instance, items, prices=None, specs=None, hidden=None, **kw):
if worksheet:
worksheet.removeAnalysis(analysis)
- delete_ids.append(analysis.getId())
-
- if delete_ids:
- # Note: subscriber might promote the AR
- instance.manage_delObjects(ids=delete_ids)
+ # Remove the analysis
+ # Note the analysis might belong to a partition
+ analysis.aq_parent.manage_delObjects(ids=[api.get_id(analysis)])
# Remove orphaned attachments
for attachment in assigned_attachments:
@@ -224,6 +202,108 @@ def set(self, instance, items, prices=None, specs=None, hidden=None, **kw):
return new_analyses
+ def add_analysis(self, instance, service, prices, hidden):
+ service_uid = api.get_uid(service)
+ new_analysis = False
+
+ # Gets the analysis or creates the analysis for this service
+ # Note this analysis might not belong to this current instance, but
+ # from a descendant (partition)
+ analysis = self.resolve_analysis(instance, service)
+ if not analysis:
+ # Create the analysis
+ new_analysis = True
+ keyword = service.getKeyword()
+ logger.info("Creating new analysis '{}'".format(keyword))
+ analysis = create_analysis(instance, service)
+
+ # Set the hidden status
+ analysis.setHidden(hidden.get(service_uid, False))
+
+ # Set the price of the Analysis
+ analysis.setPrice(prices.get(service_uid, service.getPrice()))
+
+ # Only return the analysis if is a new one
+ if new_analysis:
+ return analysis
+
+ return None
+
+ def resolve_analysis(self, instance, service):
+ """Resolves an analysis for the service and instance
+ """
+ # Does the analysis exists in this instance already?
+ analysis = self.get_from_instance(instance, service)
+ if analysis:
+ keyword = service.getKeyword()
+ logger.info("Analysis for '{}' already exists".format(keyword))
+ return analysis
+
+ # Does the analysis exists in an ancestor?
+ from_ancestor = self.get_from_ancestor(instance, service)
+ if from_ancestor:
+ # Move the analysis into this instance. The ancestor's
+ # analysis will be masked otherwise
+ analysis_id = api.get_id(from_ancestor)
+ logger.info("Analysis {} is from an ancestor".format(analysis_id))
+ cp = from_ancestor.aq_parent.manage_cutObjects(analysis_id)
+ instance.manage_pasteObjects(cp)
+ return instance._getOb(analysis_id)
+
+ # Does the analysis exists in a descendant?
+ from_descendant = self.get_from_descendant(instance, service)
+ if from_descendant:
+ # The analysis already exists in a partition, keep it. The
+ # analysis from current instance will be masked otherwise
+ analysis_id = api.get_id(from_descendant)
+ logger.info("Analysis {} is from a descendant".format(analysis_id))
+ return from_descendant
+
+ return None
+
+ def get_analyses_from_descendants(self, instance):
+ """Returns all the analyses from descendants
+ """
+ analyses = []
+ for descendant in instance.getDescendants(all_descendants=True):
+ analyses.extend(descendant.objectValues("Analysis"))
+ return analyses
+
+ def get_from_instance(self, instance, service):
+ """Returns an analysis for the given service from the instance
+ """
+ service_uid = api.get_uid(service)
+ for analysis in instance.objectValues("Analysis"):
+ if analysis.getServiceUID() == service_uid:
+ return analysis
+ return None
+
+ def get_from_ancestor(self, instance, service):
+ """Returns an analysis for the given service from ancestors
+ """
+ ancestor = instance.getParentAnalysisRequest()
+ if not ancestor:
+ return None
+
+ analysis = self.get_from_instance(ancestor, service)
+ return analysis or self.get_from_ancestor(ancestor, service)
+
+ def get_from_descendant(self, instance, service):
+ """Returns an analysis for the given service from descendants
+ """
+ for descendant in instance.getDescendants():
+ # Does the analysis exists in the current descendant?
+ analysis = self.get_from_instance(descendant, service)
+ if analysis:
+ return analysis
+
+ # Search in descendants from current descendant
+ analysis = self.get_from_descendant(descendant, service)
+ if analysis:
+ return analysis
+
+ return None
+
def _get_services(self, full_objects=False):
"""Fetch and return analysis service objects
"""
diff --git a/bika/lims/monkey/zcatalog.py b/bika/lims/monkey/zcatalog.py
index 1784ea289b..344fe5f62a 100644
--- a/bika/lims/monkey/zcatalog.py
+++ b/bika/lims/monkey/zcatalog.py
@@ -33,32 +33,15 @@ def searchResults(self, REQUEST=None, used=None, **kw):
and self.id == CATALOG_ANALYSIS_LISTING:
# Fetch all analyses that have the request UID passed in as an ancestor,
- # cause we want Primary ARs to always display the analyses from their
- # derived ARs (if result is not empty)
-
+ # cause we want for Samples to always return the contained analyses plus
+ # those contained in partitions
request = REQUEST.copy()
orig_uid = request.get('getRequestUID')
- # If a list of request uid, retrieve them sequentially to make the
- # masking process easier
- if isinstance(orig_uid, list):
- results = list()
- for uid in orig_uid:
- request['getRequestUID'] = [uid]
- results += self.searchResults(REQUEST=request, used=used, **kw)
- return results
-
# Get all analyses, those from descendant ARs included
del request['getRequestUID']
request['getAncestorsUIDs'] = orig_uid
- results = self.searchResults(REQUEST=request, used=used, **kw)
-
- # Masking
- primary = filter(lambda an: an.getParentUID == orig_uid, results)
- derived = filter(lambda an: an.getParentUID != orig_uid, results)
- derived_keys = map(lambda an: an.getKeyword, derived)
- results = filter(lambda an: an.getKeyword not in derived_keys, primary)
- return results + derived
+ return self.searchResults(REQUEST=request, used=used, **kw)
# Normal search
return self._catalog.searchResults(REQUEST, used, **kw)
diff --git a/bika/lims/tests/doctests/ARAnalysesField.rst b/bika/lims/tests/doctests/ARAnalysesField.rst
index 66b7989faf..10b19182e7 100644
--- a/bika/lims/tests/doctests/ARAnalysesField.rst
+++ b/bika/lims/tests/doctests/ARAnalysesField.rst
@@ -269,33 +269,16 @@ We expect to have just the `PH` Analysis again:
>>> ar.objectValues("Analysis")
[]
-Removing all Analyses is prevented, because it can not be empty:
-
- >>> new_analyses = field.set(ar, [])
- >>> ar.objectValues("Analysis")
- []
-
The field can also handle UIDs of Analyses Services:
>>> service_uids = map(api.get_uid, all_services)
>>> new_analyses = field.set(ar, service_uids)
-We expect again to have the `CA` and `MG` Analyses as well:
-
- >>> sorted(new_analyses, key=methodcaller('getId'))
- [, ]
-
-And all the three Analyses in total:
+We expect again to have all the three Analyses:
>>> sorted(ar.objectValues("Analysis"), key=methodcaller("getId"))
[, , ]
-Set again only the `PH` Analysis:
-
- >>> new_analyses = field.set(ar, [analysisservice1])
- >>> ar.objectValues("Analysis")
- []
-
The field should also handle catalog brains:
>>> brains = api.search({"portal_type": "AnalysisService", "getKeyword": "CA"})
diff --git a/bika/lims/tests/doctests/ARAnalysesFieldWithPartitions.rst b/bika/lims/tests/doctests/ARAnalysesFieldWithPartitions.rst
new file mode 100644
index 0000000000..c02f3c8b2e
--- /dev/null
+++ b/bika/lims/tests/doctests/ARAnalysesFieldWithPartitions.rst
@@ -0,0 +1,369 @@
+AR Analyses Field when using Partitions
+=======================================
+
+The setter of the ARAnalysesField takes descendants (partitions) and ancestors
+of the current instance into account to prevent inconsistencies: in a Sample
+lineage, analyses from a node are always masked by the same analyses in leaves.
+This can lead to inconsistencies and therefore, the tree must be kept free of
+duplicates.
+
+Running this test from the buildout directory:
+
+ bin/test test_textual_doctests -t ARAnalysesFieldWithPartitions
+
+Test Setup
+----------
+
+Needed imports:
+
+ >>> import transaction
+ >>> from DateTime import DateTime
+ >>> from plone.app.testing import setRoles
+ >>> from plone.app.testing import TEST_USER_ID
+ >>> from plone.app.testing import TEST_USER_PASSWORD
+ >>> from bika.lims import api
+ >>> from bika.lims.utils.analysisrequest import create_analysisrequest
+ >>> from bika.lims.utils.analysisrequest import create_partition
+ >>> from bika.lims.workflow import doActionFor as do_action_for
+ >>> from zope.interface import alsoProvides
+ >>> from zope.interface import noLongerProvides
+
+Functional Helpers:
+
+ >>> def new_sample(services):
+ ... values = {
+ ... 'Client': client.UID(),
+ ... 'Contact': contact.UID(),
+ ... 'DateSampled': DateTime().strftime("%Y-%m-%d"),
+ ... 'SampleType': sampletype.UID()}
+ ... service_uids = map(api.get_uid, services)
+ ... ar = create_analysisrequest(client, request, values, service_uids)
+ ... transitioned = do_action_for(ar, "receive")
+ ... return ar
+
+ >>> def get_analysis_from(sample, service):
+ ... service_uid = api.get_uid(service)
+ ... for analysis in sample.getAnalyses(full_objects=True):
+ ... if analysis.getServiceUID() == service_uid:
+ ... return analysis
+ ... return None
+
+Variables:
+
+ >>> portal = self.portal
+ >>> request = self.request
+ >>> setup = api.get_setup()
+
+Create some basic objects for the test:
+
+ >>> setRoles(portal, TEST_USER_ID, ['Manager',])
+ >>> client = api.create(portal.clients, "Client", Name="Happy Hills", ClientID="HH", MemberDiscountApplies=True)
+ >>> contact = api.create(client, "Contact", Firstname="Rita", Lastname="Mohale")
+ >>> sampletype = api.create(setup.bika_sampletypes, "SampleType", title="Water", Prefix="W")
+ >>> labcontact = api.create(setup.bika_labcontacts, "LabContact", Firstname="Lab", Lastname="Manager")
+ >>> department = api.create(setup.bika_departments, "Department", title="Chemistry", Manager=labcontact)
+ >>> category = api.create(setup.bika_analysiscategories, "AnalysisCategory", title="Metals", Department=department)
+ >>> Cu = api.create(setup.bika_analysisservices, "AnalysisService", title="Copper", Keyword="Cu", Price="15", Category=category.UID(), Accredited=True)
+ >>> Fe = api.create(setup.bika_analysisservices, "AnalysisService", title="Iron", Keyword="Fe", Price="10", Category=category.UID())
+ >>> Au = api.create(setup.bika_analysisservices, "AnalysisService", title="Gold", Keyword="Au", Price="20", Category=category.UID())
+ >>> Mg = api.create(setup.bika_analysisservices, "AnalysisService", title="Magnesium", Keyword="Mg", Price="20", Category=category.UID())
+
+
+Creation of a Sample with a Partition
+-------------------------------------
+
+Create a Sample and receive:
+
+ >>> sample = new_sample([Cu, Fe])
+
+Create a Partition of the Sample, containing the analysis `Cu`:
+
+ >>> cu = get_analysis_from(sample, Cu)
+ >>> partition = create_partition(sample, request, [cu])
+
+The analysis 'Cu' lives in the partition:
+
+ >>> cu = get_analysis_from(partition, Cu)
+ >>> api.get_parent(cu) == partition
+ True
+
+Although is also returned by the primary:
+
+ >>> cu = get_analysis_from(sample, Cu)
+ >>> api.get_parent(cu) == partition
+ True
+ >>> api.get_parent(cu) == sample
+ False
+
+
+Analyses retrieval
+------------------
+
+Get the ARAnalysesField to play with:
+
+ >>> field = sample.getField("Analyses")
+
+get_from_instance
+.................
+
+When asked for `Fe` when the primary is given, it returns the analysis, cause
+it lives in the primary:
+
+ >>> fe = field.get_from_instance(sample, Fe)
+ >>> fe.getServiceUID() == api.get_uid(Fe)
+ True
+
+But when asked for `Cu` when the primary is given, it returns None, cause it
+lives in the partition:
+
+ >>> cu = field.get_from_instance(sample, Cu)
+ >>> cu is None
+ True
+
+While it returns the analysis when the partition is used:
+
+ >>> cu = field.get_from_instance(partition, Cu)
+ >>> cu.getServiceUID() == api.get_uid(Cu)
+ True
+
+But when asking the partition for `Fe` it returns None, cause it lives in the
+ancestor:
+
+ >>> fe = field.get_from_instance(partition, Fe)
+ >>> fe is None
+ True
+
+get_from_ancestor
+.................
+
+When asked for `Fe` to primary, it returns None because there is no ancestor
+containing `Fe`:
+
+ >>> fe = field.get_from_ancestor(sample, Fe)
+ >>> fe is None
+ True
+
+But when asked for `Fe` to the partition, it returns the analysis, cause it
+lives in an ancestor of the partition:
+
+ >>> fe = field.get_from_ancestor(partition, Fe)
+ >>> fe.getServiceUID() == api.get_uid(Fe)
+ True
+
+If I ask for `Cu`, that lives in the partition, it will return None for both:
+
+ >>> cu = field.get_from_ancestor(sample, Cu)
+ >>> cu is None
+ True
+
+ >>> cu = field.get_from_ancestor(partition, Cu)
+ >>> cu is None
+ True
+
+get_from_descendant
+...................
+
+When asked for `Fe` to primary, it returns None because there is no descendant
+containing `Fe`:
+
+ >>> fe = field.get_from_descendant(sample, Fe)
+ >>> fe is None
+ True
+
+And same with partition:
+
+ >>> fe = field.get_from_descendant(partition, Fe)
+ >>> fe is None
+ True
+
+When asked for `Cu` to primary, it returns the analysis, because it lives in a
+descendant (partition):
+
+ >>> cu = field.get_from_descendant(sample, Cu)
+ >>> cu.getServiceUID() == api.get_uid(Cu)
+ True
+
+But returns None if I ask to the partition:
+
+ >>> cu = field.get_from_descendant(partition, Cu)
+ >>> cu is None
+ True
+
+get_analyses_from_descendants
+.............................
+
+It returns the analyses contained by the descendants:
+
+    >>> field.get_analyses_from_descendants(sample)
+    [<Analysis at /plone/clients/client-1/W-0001-P01/Cu>]
+
+ >>> field.get_analyses_from_descendants(partition)
+ []
+
+
+Resolution of analyses from the Sample lineage
+----------------------------------------------
+
+resolve_analysis
+................
+
+Resolves the analysis from the sample lineage if exists:
+
+ >>> fe = field.resolve_analysis(sample, Fe)
+ >>> fe.getServiceUID() == api.get_uid(Fe)
+ True
+ >>> fe.aq_parent == sample
+ True
+
+ >>> cu = field.resolve_analysis(sample, Cu)
+ >>> cu.getServiceUID() == api.get_uid(Cu)
+ True
+ >>> cu.aq_parent == partition
+ True
+
+ >>> au = field.resolve_analysis(sample, Au)
+ >>> au is None
+ True
+
+But when we use the partition and the analysis is found in an ancestor, it
+moves the analysis into the partition:
+
+ >>> fe = field.resolve_analysis(partition, Fe)
+ >>> fe.getServiceUID() == api.get_uid(Fe)
+ True
+ >>> fe.aq_parent == partition
+ True
+ >>> sample.objectValues("Analysis")
+ []
+    >>> partition.objectValues("Analysis")
+    [<Analysis at /plone/clients/client-1/W-0001-P01/Cu>, <Analysis at /plone/clients/client-1/W-0001-P01/Fe>]
+
+
+Addition of analyses
+--------------------
+
+add_analysis
+............
+
+Setup required parameters:
+
+ >>> prices = hidden = dict()
+
+If we try to add now an analysis that already exists, either in the partition or
+in the primary, the analysis won't be added:
+
+ >>> added = field.add_analysis(sample, Fe, prices, hidden)
+ >>> added is None
+ True
+ >>> sample.objectValues("Analysis")
+ []
+
+ >>> added = field.add_analysis(partition, Fe, prices, hidden)
+ >>> added is None
+ True
+    >>> partition.objectValues("Analysis")
+    [<Analysis at /plone/clients/client-1/W-0001-P01/Cu>, <Analysis at /plone/clients/client-1/W-0001-P01/Fe>]
+
+If we add a new analysis, this will be added in the sample we are working with:
+
+ >>> au = field.add_analysis(sample, Au, prices, hidden)
+ >>> au.getServiceUID() == api.get_uid(Au)
+ True
+    >>> sample.objectValues("Analysis")
+    [<Analysis at /plone/clients/client-1/W-0001/Au>]
+    >>> partition.objectValues("Analysis")
+    [<Analysis at /plone/clients/client-1/W-0001-P01/Cu>, <Analysis at /plone/clients/client-1/W-0001-P01/Fe>]
+
+Apply the changes:
+
+ >>> transaction.commit()
+
+If I try to add an analysis that exists in an ancestor, the analysis gets moved
+while the function returns None:
+
+ >>> added = field.add_analysis(partition, Au, prices, hidden)
+ >>> added is None
+ True
+ >>> sample.objectValues("Analysis")
+ []
+    >>> partition.objectValues("Analysis")
+    [<Analysis at /plone/clients/client-1/W-0001-P01/Cu>, <Analysis at /plone/clients/client-1/W-0001-P01/Fe>, <Analysis at /plone/clients/client-1/W-0001-P01/Au>]
+
+
+Set analyses
+------------
+
+If we try to set same analyses as before to the root sample, nothing happens
+because the analyses are already there:
+
+ >>> field.set(sample, [Cu, Fe, Au])
+ []
+
+The analyses still belong to the partition though:
+
+ >>> sample.objectValues("Analysis")
+ []
+    >>> partition.objectValues("Analysis")
+    [<Analysis at /plone/clients/client-1/W-0001-P01/Cu>, <Analysis at /plone/clients/client-1/W-0001-P01/Fe>, <Analysis at /plone/clients/client-1/W-0001-P01/Au>]
+
+Same result if I set the analyses to the partition:
+
+ >>> field.set(partition, [Cu, Fe, Au])
+ []
+ >>> sample.objectValues("Analysis")
+ []
+    >>> partition.objectValues("Analysis")
+    [<Analysis at /plone/clients/client-1/W-0001-P01/Cu>, <Analysis at /plone/clients/client-1/W-0001-P01/Fe>, <Analysis at /plone/clients/client-1/W-0001-P01/Au>]
+
+If I add a new analysis in the list, the analysis is successfully added:
+
+    >>> field.set(sample, [Cu, Fe, Au, Mg])
+    [<Analysis at /plone/clients/client-1/W-0001/Mg>]
+    >>> sample.objectValues("Analysis")
+    [<Analysis at /plone/clients/client-1/W-0001/Mg>]
+
+And the partition keeps its own analyses:
+
+    >>> partition.objectValues("Analysis")
+    [<Analysis at /plone/clients/client-1/W-0001-P01/Cu>, <Analysis at /plone/clients/client-1/W-0001-P01/Fe>, <Analysis at /plone/clients/client-1/W-0001-P01/Au>]
+
+Apply the changes:
+
+ >>> transaction.commit()
+
+If I set the same analyses to the partition, I don't get any result:
+
+ >>> field.set(partition, [Cu, Fe, Au, Mg])
+ []
+
+but, the `Mg` analysis has been moved into the partition:
+
+ >>> sample.objectValues("Analysis")
+ []
+    >>> partition.objectValues("Analysis")
+    [<Analysis at /plone/clients/client-1/W-0001-P01/Cu>, <Analysis at /plone/clients/client-1/W-0001-P01/Fe>, <Analysis at /plone/clients/client-1/W-0001-P01/Au>, <Analysis at /plone/clients/client-1/W-0001-P01/Mg>]
+
+To remove `Mg` analysis, pass the list without `Mg`:
+
+ >>> field.set(sample, [Cu, Fe, Au])
+ []
+
+The analysis `Mg` has been removed, although it belonged to the partition:
+
+ >>> sample.objectValues("Analysis")
+ []
+    >>> partition.objectValues("Analysis")
+    [<Analysis at /plone/clients/client-1/W-0001-P01/Cu>, <Analysis at /plone/clients/client-1/W-0001-P01/Fe>, <Analysis at /plone/clients/client-1/W-0001-P01/Au>]
+
+But if I add a new analysis to the primary and I try to remove it from the
+partition, nothing will happen:
+
+    >>> field.set(sample, [Cu, Fe, Au, Mg])
+    [<Analysis at /plone/clients/client-1/W-0001/Mg>]
+
+ >>> field.set(partition, [Cu, Fe, Au])
+ []
+    >>> sample.objectValues("Analysis")
+    [<Analysis at /plone/clients/client-1/W-0001/Mg>]
+    >>> partition.objectValues("Analysis")
+    [<Analysis at /plone/clients/client-1/W-0001-P01/Cu>, <Analysis at /plone/clients/client-1/W-0001-P01/Fe>, <Analysis at /plone/clients/client-1/W-0001-P01/Au>]
diff --git a/bika/lims/upgrade/v01_03_003.py b/bika/lims/upgrade/v01_03_003.py
index 90ac9fbf04..03bd53b29b 100644
--- a/bika/lims/upgrade/v01_03_003.py
+++ b/bika/lims/upgrade/v01_03_003.py
@@ -18,15 +18,19 @@
# Copyright 2018-2019 by it's authors.
# Some rights reserved, see README and LICENSE.
-from Products.Archetypes.config import UID_CATALOG
+from collections import defaultdict
+from operator import itemgetter
from bika.lims import api
from bika.lims import logger
from bika.lims.catalog.bikasetup_catalog import SETUP_CATALOG
from bika.lims.config import PROJECTNAME as product
+from bika.lims.interfaces import ISubmitted
+from bika.lims.interfaces import IVerified
from bika.lims.setuphandlers import setup_form_controller_actions
from bika.lims.upgrade import upgradestep
from bika.lims.upgrade.utils import UpgradeUtils
+from Products.Archetypes.config import UID_CATALOG
version = "1.3.3" # Remember version number in metadata.xml and setup.py
profile = "profile-{0}:default".format(product)
@@ -239,6 +243,9 @@ def upgrade(tool):
# -------- ADD YOUR STUFF BELOW --------
+ # https://github.com/senaite/senaite.core/issues/1504
+ remove_cascaded_analyses_of_root_samples(portal)
+
# Add additional JavaScripts to registry
setup.runImportStepFromProfile(profile, "jsregistry")
@@ -263,6 +270,151 @@ def upgrade(tool):
return True
+def remove_cascaded_analyses_of_root_samples(portal):
+ """Removes Analyses from Root Samples that belong to Partitions
+
+ https://github.com/senaite/senaite.core/issues/1504
+ """
+ logger.info("Removing cascaded analyses from Root Samples...")
+
+ # Query all root Samples
+ query = {
+ "isRootAncestor": True,
+ "sort_on": "created",
+ "sort_order": "ascending",
+ }
+ root_samples = api.search(query, "bika_catalog_analysisrequest_listing")
+ total = len(root_samples)
+ logger.info("{} Samples to check... ".format(total))
+
+ to_clean = []
+
+ for num, brain in enumerate(root_samples):
+ logger.debug("Checking Root Sample {}/{}".format(num+1, total))
+
+ # No Partitions, continue...
+ if not brain.getDescendantsUIDs:
+ continue
+
+ # get the root sample
+ root_sample = api.get_object(brain)
+ # get the contained analyses of the root sample
+ root_analyses = root_sample.objectIds(spec=["Analysis"])
+
+ # Mapping of cascaded Analysis -> Partition
+ analysis_mapping = {}
+
+ # check if a root analysis is located as well in one of the partitions
+ for partition in root_sample.getDescendants():
+ # get the contained analyses of the partition
+ part_analyses = partition.objectIds(spec=["Analysis"])
+ # filter analyses that cascade root analyses
+ cascaded = filter(lambda an: an in root_analyses, part_analyses)
+ # keep a mapping of analysis -> partition
+ for analysis in cascaded:
+ analysis_mapping[analysis] = partition
+
+ if analysis_mapping:
+ to_clean.append((root_sample, analysis_mapping))
+
+ # count the cases for each condition
+ case_counter = defaultdict(int)
+
+ # cleanup cascaded analyses
+ # mapping maps the analysis id -> partition
+ for sample, mapping in to_clean:
+
+ # go through the cascaded analyses and decide if the cascaded analysis
+ # should be removed from (a) the root sample or (b) the partition.
+
+ for analysis_id, partition in mapping.items():
+
+ # analysis from the root sample
+ root_an = sample[analysis_id]
+ # WF state from the root sample analysis
+ root_an_state = api.get_workflow_status_of(root_an)
+
+ # analysis from the partition sample
+ part_an = partition[analysis_id]
+ # WF state from the partition sample analysis
+ part_an_state = api.get_workflow_status_of(part_an)
+
+ case_counter["{}_{}".format(root_an_state, part_an_state)] += 1
+
+ # both analyses have the same WF state
+ if root_an_state == part_an_state:
+ # -> remove the analysis from the root sample
+ sample._delObject(analysis_id)
+ logger.info(
+ "Remove analysis '{}' in state '{}' from sample {}: {}"
+ .format(analysis_id, root_an_state,
+ api.get_id(sample), api.get_url(sample)))
+
+ # both are in verified/published state
+ elif IVerified.providedBy(root_an) and IVerified.providedBy(part_an):
+ root_an_result = root_an.getResult()
+                part_an_result = part_an.getResult()
+ if root_an_result == part_an_result:
+ # remove the root analysis
+ sample._delObject(analysis_id)
+ logger.info(
+ "Remove analysis '{}' in state '{}' from sample {}: {}"
+ .format(analysis_id, root_an_state,
+ api.get_id(sample), api.get_url(sample)))
+ else:
+ # -> unsolvable edge case
+ # display an error message
+ logger.error(
+ "Analysis '{}' of root sample in state '{}' "
+ "and Analysis of partition in state {}. "
+ "Please fix manually: {}"
+ .format(analysis_id, root_an_state, part_an_state,
+ api.get_url(sample)))
+
+ # root analysis is in invalid state
+ elif root_an_state in ["rejected", "retracted"]:
+ # -> probably the retest was automatically created in the
+ # parent instead of the partition
+ pass
+
+ # partition analysis is in invalid state
+ elif part_an_state in ["rejected", "retracted"]:
+ # -> probably the retest was automatically created in the
+ # parent instead of the partition
+ pass
+
+ # root analysis was submitted, but not the partition analysis
+ elif ISubmitted.providedBy(root_an) and not ISubmitted.providedBy(part_an):
+ # -> remove the analysis from the partition
+ partition._delObject(analysis_id)
+ logger.info(
+ "Remove analysis '{}' in state '{}' from partition {}: {}"
+ .format(analysis_id, part_an_state,
+ api.get_id(partition), api.get_url(partition)))
+
+ # partition analysis was submitted, but not the root analysis
+ elif ISubmitted.providedBy(part_an) and not ISubmitted.providedBy(root_an):
+ # -> remove the analysis from the root sample
+ sample._delObject(analysis_id)
+ logger.info(
+ "Remove analysis '{}' in state '{}' from sample {}: {}"
+ .format(analysis_id, root_an_state,
+ api.get_id(sample), api.get_url(sample)))
+
+ # inconsistent state
+ else:
+ logger.warning(
+ "Can not handle analysis '{}' located in '{}' (state {}) and '{}' (state {})"
+ .format(analysis_id,
+ repr(sample), root_an_state,
+ repr(partition), part_an_state))
+
+ logger.info("Removing cascaded analyses from Root Samples... [DONE]")
+
+ logger.info("State Combinations (root_an_state, part_an_state): {}"
+ .format(sorted(case_counter.items(), key=itemgetter(1), reverse=True)))
+
+
def reindex_client_fields(portal):
logger.info("Reindexing client fields ...")
fields_to_reindex = [