diff --git a/CHANGES.rst b/CHANGES.rst
index 16d4c0cb1b..816536a1ff 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -7,6 +7,9 @@ Changelog
**Added**
+- #1506 Specification non-compliance viewlet in Sample
+- #1506 Out-of-date results ranges viewlet in Sample
+- #1506 Warning icon in analyses when the range is not compliant with the Specification
- #1492 Dynamic Analysis Specifications
- #1507 Support for semi-colon character separator in CCEmails field
- #1499 Moved navigation portlet into core
@@ -36,6 +39,9 @@ Changelog
**Fixed**
+- #1506 Changes via manage results don't get applied to partitions
+- #1506 Fix recursion error when getting dependencies through Calculation
+- #1506 Setter from ARAnalysesField no longer returns values
- #1512 QC Analyses listing appears empty in Sample view
- #1510 Error when viewing a Sample w/o Batch as client contact
- #1511 Links to partitions for Internal Use are displayed in partitions viewlet
diff --git a/bika/lims/api/analysis.py b/bika/lims/api/analysis.py
index 0d141a90ab..e466e53600 100644
--- a/bika/lims/api/analysis.py
+++ b/bika/lims/api/analysis.py
@@ -27,6 +27,8 @@
IResultOutOfRange
from zope.component._api import getAdapters
+from bika.lims.interfaces.analysis import IRequestAnalysis
+
def is_out_of_range(brain_or_object, result=_marker):
"""Checks if the result for the analysis passed in is out of range and/or
@@ -148,3 +150,33 @@ def get_formatted_interval(results_range, default=_marker):
max_bracket = max_operator == 'leq' and ']' or ')'
return "{}{};{}{}".format(min_bracket, min_str, max_str, max_bracket)
+
+
+def is_result_range_compliant(analysis):
+    """Returns whether the result range of the analysis matches the result
+    range set in the Sample for the corresponding service
+ """
+ if not IRequestAnalysis.providedBy(analysis):
+ return True
+
+ rr = analysis.getResultsRange()
+ service_uid = rr.get("uid", None)
+ if not api.is_uid(service_uid):
+ return True
+
+ # Compare with Sample
+ sample = analysis.getRequest()
+
+    # If no Specification is set, assume it is compliant
+ specification = sample.getRawSpecification()
+ if not specification:
+ return True
+
+ # Compare with the Specification that was initially set to the Sample
+ sample_rr = sample.getResultsRange(search_by=service_uid)
+ if not sample_rr:
+        # This service is not defined in the Sample's ResultsRange, so we
+        # assume it *does not* break the compliance
+ return True
+
+ return rr == sample_rr
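
For reference, a minimal usage sketch of the new helper (not part of the diff); the `warn_if_not_compliant` wrapper and its message are illustrative, while the API calls mirror the ones used above:

```python
# Usage sketch, assuming "analysis" is an IRequestAnalysis object that belongs
# to a Sample with a Specification assigned.
from bika.lims.api.analysis import is_result_range_compliant

def warn_if_not_compliant(analysis):
    """Illustrative helper: returns a warning string when the analysis result
    range no longer matches the range initially set in its Sample
    """
    if is_result_range_compliant(analysis):
        return None
    sample = analysis.getRequest()
    sample_rr = sample.getResultsRange(search_by=analysis.getServiceUID())
    return "Result range differs from the Sample specification: {}".format(sample_rr)
```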
diff --git a/bika/lims/browser/analyses/view.py b/bika/lims/browser/analyses/view.py
index d47a3687c7..dcf2cab9bc 100644
--- a/bika/lims/browser/analyses/view.py
+++ b/bika/lims/browser/analyses/view.py
@@ -25,11 +25,15 @@
from DateTime import DateTime
from Products.Archetypes.config import REFERENCE_CATALOG
from Products.CMFPlone.utils import safe_unicode
+from plone.memoize import view as viewcache
+from zope.component import getAdapters
+
from bika.lims import api
from bika.lims import bikaMessageFactory as _
from bika.lims import logger
from bika.lims.api.analysis import get_formatted_interval
from bika.lims.api.analysis import is_out_of_range
+from bika.lims.api.analysis import is_result_range_compliant
from bika.lims.browser.bika_listing import BikaListingView
from bika.lims.catalog import CATALOG_ANALYSIS_LISTING
from bika.lims.config import LDL
@@ -51,8 +55,6 @@
from bika.lims.utils import get_link
from bika.lims.utils import t
from bika.lims.utils.analysis import format_uncertainty
-from plone.memoize import view as viewcache
-from zope.component import getAdapters
class AnalysesView(BikaListingView):
@@ -1033,13 +1035,25 @@ def _folder_item_specifications(self, analysis_brain, item):
# Show an icon if out of range
out_range, out_shoulders = is_out_of_range(analysis_brain)
- if not out_range:
- return
- # At least is out of range
- img = get_image("exclamation.png", title=_("Result out of range"))
- if not out_shoulders:
- img = get_image("warning.png", title=_("Result in shoulder range"))
- self._append_html_element(item, "Result", img)
+ if out_range:
+ msg = _("Result out of range")
+ img = get_image("exclamation.png", title=msg)
+ if not out_shoulders:
+ msg = _("Result in shoulder range")
+ img = get_image("warning.png", title=msg)
+ self._append_html_element(item, "Result", img)
+
+ # Show an icon if the analysis range is different from the Sample spec
+ if IAnalysisRequest.providedBy(self.context):
+ analysis = self.get_object(analysis_brain)
+ if not is_result_range_compliant(analysis):
+ service_uid = analysis_brain.getServiceUID
+ original = self.context.getResultsRange(search_by=service_uid)
+ original = get_formatted_interval(original, "")
+ msg = _("Result range is different from Specification: {}"
+ .format(original))
+ img = get_image("warning.png", title=msg)
+ self._append_html_element(item, "Specification", img)
def _folder_item_verify_icons(self, analysis_brain, item):
"""Set the analysis' verification icons to the item passed in.
diff --git a/bika/lims/browser/analysisrequest/add2.py b/bika/lims/browser/analysisrequest/add2.py
index 460398c965..81a26a6320 100644
--- a/bika/lims/browser/analysisrequest/add2.py
+++ b/bika/lims/browser/analysisrequest/add2.py
@@ -1670,7 +1670,7 @@ def ajax_submit(self):
client,
self.request,
record,
- specifications=specifications
+ results_ranges=specifications
)
except (KeyError, RuntimeError) as e:
actions.resume()
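
For clarity, a sketch of the (truncated) call above after the keyword rename; the `crar` alias for `create_analysisrequest` is an assumption, only the arguments shown are taken from the hunk:

```python
# Sketch only: the per-service specifications parsed from the Add form are now
# handed over under the "results_ranges" keyword instead of "specifications".
ar = crar(
    client,                         # Client the new Sample belongs to
    self.request,                   # current HTTP request
    record,                         # parsed form values for this Sample
    results_ranges=specifications,  # was: specifications=specifications
)
```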
diff --git a/bika/lims/browser/analysisrequest/manage_analyses.py b/bika/lims/browser/analysisrequest/manage_analyses.py
index 1db380950c..cca18dafb8 100644
--- a/bika/lims/browser/analysisrequest/manage_analyses.py
+++ b/bika/lims/browser/analysisrequest/manage_analyses.py
@@ -142,12 +142,24 @@ def show_ar_specs(self):
@view.memoize
def get_results_range(self):
- """Get the results Range from the AR
+        """Get the results range from the Sample, giving priority to the
+        result ranges set on analyses. This guarantees that result ranges of
+        analyses already present are not overridden after form submission
"""
- spec = self.context.getResultsRange()
- if spec:
- return dicts_to_dict(spec, "keyword")
- return ResultsRangeDict()
+ # Extract the result ranges from Sample analyses
+ analyses = self.analyses.values()
+ analyses_rrs = map(lambda an: an.getResultsRange(), analyses)
+ analyses_rrs = filter(None, analyses_rrs)
+ rrs = dicts_to_dict(analyses_rrs, "keyword")
+
+        # Discard ranges from the Sample that are already present in analyses
+ sample_rrs = self.context.getResultsRange()
+ sample_rrs = filter(lambda rr: rr["keyword"] not in rrs, sample_rrs)
+ sample_rrs = dicts_to_dict(sample_rrs, "keyword")
+
+ # Extend result ranges with those from Sample
+ rrs.update(sample_rrs)
+ return rrs
@view.memoize
def get_currency_symbol(self):
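
The merge priority can be illustrated with plain dictionaries (keywords and values below are made up): ranges already stored on analyses win, and the Sample's ranges only fill in the services that have no analysis-level range yet.

```python
# Illustrative data only: "Ca" is defined both on an analysis and on the
# Sample; the analysis copy must survive the merge.
analyses_rrs = {"Ca": {"keyword": "Ca", "min": "5", "max": "10"}}
sample_rrs = [
    {"keyword": "Ca", "min": "0", "max": "20"},  # superseded by the analysis
    {"keyword": "Mg", "min": "1", "max": "3"},   # only defined on the Sample
]

rrs = dict(analyses_rrs)
for rr in sample_rrs:
    if rr["keyword"] not in rrs:
        rrs[rr["keyword"]] = rr

# rrs keeps the analysis-level range for "Ca" and adds "Mg" from the Sample
assert rrs["Ca"]["min"] == "5" and "Mg" in rrs
```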
diff --git a/bika/lims/browser/analysisspec.py b/bika/lims/browser/analysisspec.py
deleted file mode 100644
index 3608e2cd84..0000000000
--- a/bika/lims/browser/analysisspec.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of SENAITE.CORE.
-#
-# SENAITE.CORE is free software: you can redistribute it and/or modify it under
-# the terms of the GNU General Public License as published by the Free Software
-# Foundation, version 2.
-#
-# This program is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
-# details.
-#
-# You should have received a copy of the GNU General Public License along with
-# this program; if not, write to the Free Software Foundation, Inc., 51
-# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-# Copyright 2018-2019 by it's authors.
-# Some rights reserved, see README and LICENSE.
-
-from bika.lims.config import POINTS_OF_CAPTURE
-from bika.lims.interfaces import IAnalysisSpec
-from bika.lims.interfaces import IJSONReadExtender
-from zope.component import adapts
-from zope.interface import implements
-
-class JSONReadExtender(object):
- """Adds the UID to the ResultsRange dict. This will go away
- when we stop using keywords for this stuff.
- """
-
- implements(IJSONReadExtender)
- adapts(IAnalysisSpec)
-
- def __init__(self, context):
- self.context = context
-
- def __call__(self, request, data):
- bsc = self.context.bika_setup_catalog
- rr = []
- for i, x in enumerate(data.get("ResultsRange", [])):
- keyword = x.get("keyword")
- proxies = bsc(portal_type="AnalysisService", getKeyword=keyword)
- if proxies:
- data['ResultsRange'][i]['uid'] = proxies[0].UID
diff --git a/bika/lims/browser/analysisspec.zcml b/bika/lims/browser/analysisspec.zcml
deleted file mode 100644
index d3951601f2..0000000000
--- a/bika/lims/browser/analysisspec.zcml
+++ /dev/null
@@ -1,12 +0,0 @@
-
-
-
-
-
diff --git a/bika/lims/browser/configure.zcml b/bika/lims/browser/configure.zcml
index f7bcd77559..a7eb065517 100644
--- a/bika/lims/browser/configure.zcml
+++ b/bika/lims/browser/configure.zcml
@@ -11,7 +11,6 @@
-
diff --git a/bika/lims/browser/fields/__init__.py b/bika/lims/browser/fields/__init__.py
index d366dca597..4fb1102419 100644
--- a/bika/lims/browser/fields/__init__.py
+++ b/bika/lims/browser/fields/__init__.py
@@ -30,3 +30,5 @@
from .proxyfield import ProxyField
from .uidreferencefield import UIDReferenceField
from .emailsfield import EmailsField
+from .resultrangefield import ResultRangeField
+from .resultsrangesfield import ResultsRangesField
diff --git a/bika/lims/browser/fields/aranalysesfield.py b/bika/lims/browser/fields/aranalysesfield.py
index f3fbfd6082..49b0cf5acf 100644
--- a/bika/lims/browser/fields/aranalysesfield.py
+++ b/bika/lims/browser/fields/aranalysesfield.py
@@ -25,15 +25,19 @@
from Products.Archetypes.Registry import registerField
from Products.Archetypes.public import Field
from Products.Archetypes.public import ObjectField
+from zope.interface import alsoProvides
from zope.interface import implements
+from zope.interface import noLongerProvides
from bika.lims import api
from bika.lims import logger
from bika.lims.api.security import check_permission
from bika.lims.catalog import CATALOG_ANALYSIS_LISTING
+from bika.lims.catalog import SETUP_CATALOG
from bika.lims.interfaces import IARAnalysesField
from bika.lims.interfaces import IAnalysis
from bika.lims.interfaces import IAnalysisService
+from bika.lims.interfaces import IInternalUse
from bika.lims.interfaces import ISubmitted
from bika.lims.permissions import AddAnalysis
from bika.lims.utils.analysis import create_analysis
@@ -127,139 +131,171 @@ def set(self, instance, items, prices=None, specs=None, hidden=None, **kw):
services = filter(None, map(self._to_service, items))
# Calculate dependencies
- # FIXME Infinite recursion error possible here, if the formula includes
- # the Keyword of the Service that includes the Calculation
dependencies = map(lambda s: s.getServiceDependencies(), services)
dependencies = list(itertools.chain.from_iterable(dependencies))
# Merge dependencies and services
services = set(services + dependencies)
- # Modify existing AR specs with new form values of selected analyses.
- self._update_specs(instance, specs)
-
- # Create a mapping of Service UID -> Hidden status
- if hidden is None:
- hidden = []
- hidden = dict(map(lambda d: (d.get("uid"), d.get("hidden")), hidden))
-
- # Ensure we have a prices dictionary
- if prices is None:
- prices = dict()
+        # Resolve the result ranges to apply, giving priority to the form
+        # values over the Sample's stored specifications
+ specs = self.resolve_specs(instance, specs)
# Add analyses
- new_analyses = map(lambda service:
- self.add_analysis(instance, service, prices, hidden),
- services)
- new_analyses = filter(None, new_analyses)
+ params = dict(prices=prices, hidden=hidden, specs=specs)
+ map(lambda serv: self.add_analysis(instance, serv, **params), services)
- # Remove analyses
- # Since Manage Analyses view displays the analyses from partitions, we
- # also need to take them into consideration here. Analyses from
- # ancestors can be omitted.
+ # Get all analyses (those from descendants included)
analyses = instance.objectValues("Analysis")
analyses.extend(self.get_analyses_from_descendants(instance))
- # Service UIDs
- service_uids = map(api.get_uid, services)
+        # Select for removal the analyses whose service is not in the list,
+        # skipping those already submitted
+ uids = map(api.get_uid, services)
+ to_remove = filter(lambda an: an.getServiceUID() not in uids, analyses)
+ to_remove = filter(lambda an: not ISubmitted.providedBy(an), to_remove)
- # Assigned Attachments
- assigned_attachments = []
+ # Remove analyses
+ map(self.remove_analysis, to_remove)
- for analysis in analyses:
- service_uid = analysis.getServiceUID()
+ def resolve_specs(self, instance, results_ranges):
+ """Returns a dictionary where the key is the service_uid and the value
+ is its results range. The dictionary is made by extending the
+ results_ranges passed-in with the Sample's ResultsRanges (a copy of the
+ specifications initially set)
+ """
+ rrs = results_ranges or []
- # Skip if the Service is selected
- if service_uid in service_uids:
- continue
+ # Sample's Results ranges
+ sample_rrs = instance.getResultsRange()
- # Skip non-open Analyses
- if ISubmitted.providedBy(analysis):
- continue
+ # Resolve results_ranges passed-in to make sure they contain uid
+ rrs = map(lambda rr: self.resolve_uid(rr), rrs)
- # Remember assigned attachments
- # https://github.com/senaite/senaite.core/issues/1025
- assigned_attachments.extend(analysis.getAttachment())
- analysis.setAttachment([])
+ # Append those from sample that are missing in the ranges passed-in
+ service_uids = map(lambda rr: rr["uid"], rrs)
+ rrs.extend(filter(lambda rr: rr["uid"] not in service_uids, sample_rrs))
- # If it is assigned to a worksheet, unassign it before deletion.
- worksheet = analysis.getWorksheet()
- if worksheet:
- worksheet.removeAnalysis(analysis)
+ # Create a dict for easy access to results ranges
+ return dict(map(lambda rr: (rr["uid"], rr), rrs))
- # Remove the analysis
- # Note the analysis might belong to a partition
- analysis.aq_parent.manage_delObjects(ids=[api.get_id(analysis)])
+ def resolve_uid(self, result_range):
+        """Resolves the uid key for the result_range passed in when it is
+        missing or invalid, inferring it from the keyword if possible
+ """
+ value = result_range.copy()
+ uid = value.get("uid")
+ if api.is_uid(uid) and uid != "0":
+ return value
+
+        # uid key does not exist or is not valid, try to infer it from the keyword
+ keyword = value.get("keyword")
+ if keyword:
+ query = dict(portal_type="AnalysisService", getKeyword=keyword)
+ brains = api.search(query, SETUP_CATALOG)
+ if len(brains) == 1:
+ uid = api.get_uid(brains[0])
+ value["uid"] = uid
+ return value
+
+ def add_analysis(self, instance, service, **kwargs):
+ service_uid = api.get_uid(service)
- # Remove orphaned attachments
- for attachment in assigned_attachments:
- # only delete attachments which are no further linked
- if not attachment.getLinkedAnalyses():
- logger.info(
- "Deleting attachment: {}".format(attachment.getId()))
- attachment_id = api.get_id(attachment)
- api.get_parent(attachment).manage_delObjects(attachment_id)
+ # Ensure we have suitable parameters
+ specs = kwargs.get("specs") or {}
- return new_analyses
+ # Get the hidden status for the service
+ hidden = kwargs.get("hidden") or []
+ hidden = filter(lambda d: d.get("uid") == service_uid, hidden)
+ hidden = hidden and hidden[0].get("hidden") or service.getHidden()
- def add_analysis(self, instance, service, prices, hidden):
- service_uid = api.get_uid(service)
- new_analysis = False
+ # Get the price for the service
+ prices = kwargs.get("prices") or {}
+ price = prices.get(service_uid) or service.getPrice()
# Gets the analysis or creates the analysis for this service
- # Note this analysis might not belong to this current instance, but
- # from a descendant (partition)
- analysis = self.resolve_analysis(instance, service)
- if not analysis:
+        # Note this returns a list, because it is possible to have multiple
+        # partitions with the same analysis
+ analyses = self.resolve_analyses(instance, service)
+ if not analyses:
# Create the analysis
- new_analysis = True
keyword = service.getKeyword()
logger.info("Creating new analysis '{}'".format(keyword))
analysis = create_analysis(instance, service)
+ analyses.append(analysis)
- # Set the hidden status
- analysis.setHidden(hidden.get(service_uid, False))
+ skip = ["cancelled", "retracted", "rejected"]
+ for analysis in analyses:
+            # Skip analyses that are better left unmodified
+ if api.get_review_status(analysis) in skip:
+ continue
- # Set the price of the Analysis
- analysis.setPrice(prices.get(service_uid, service.getPrice()))
+ # Set the hidden status
+ analysis.setHidden(hidden)
- # Only return the analysis if is a new one
- if new_analysis:
- return analysis
+ # Set the price of the Analysis
+ analysis.setPrice(price)
- return None
+ # Set the internal use status
+ parent_sample = analysis.getRequest()
+ analysis.setInternalUse(parent_sample.getInternalUse())
+
+ # Set the result range to the analysis
+ analysis_rr = specs.get(service_uid) or analysis.getResultsRange()
+ analysis.setResultsRange(analysis_rr)
+ analysis.reindexObject()
- def resolve_analysis(self, instance, service):
- """Resolves an analysis for the service and instance
+ def remove_analysis(self, analysis):
+ """Removes a given analysis from the instance
"""
+ # Remember assigned attachments
+ # https://github.com/senaite/senaite.core/issues/1025
+ attachments = analysis.getAttachment()
+ analysis.setAttachment([])
+
+ # If assigned to a worksheet, unassign it before deletion
+ worksheet = analysis.getWorksheet()
+ if worksheet:
+ worksheet.removeAnalysis(analysis)
+
+ # Remove the analysis
+ # Note the analysis might belong to a partition
+ analysis.aq_parent.manage_delObjects(ids=[api.get_id(analysis)])
+
+ # Remove orphaned attachments
+ for attachment in attachments:
+ if not attachment.getLinkedAnalyses():
+                # only delete attachments which are no longer linked
+ logger.info(
+ "Deleting attachment: {}".format(attachment.getId()))
+ attachment_id = api.get_id(attachment)
+ api.get_parent(attachment).manage_delObjects(attachment_id)
+
+ def resolve_analyses(self, instance, service):
+ """Resolves analyses for the service and instance
+        It returns a list because, for a given sample, multiple analyses for
+        the same service can exist when the sample has multiple partitions
+ """
+ analyses = []
+
# Does the analysis exists in this instance already?
- analysis = self.get_from_instance(instance, service)
- if analysis:
- keyword = service.getKeyword()
- logger.info("Analysis for '{}' already exists".format(keyword))
- return analysis
+ instance_analyses = self.get_from_instance(instance, service)
+ if instance_analyses:
+ analyses.extend(instance_analyses)
# Does the analysis exists in an ancestor?
from_ancestor = self.get_from_ancestor(instance, service)
- if from_ancestor:
+ for ancestor_analysis in from_ancestor:
# Move the analysis into this instance. The ancestor's
# analysis will be masked otherwise
- analysis_id = api.get_id(from_ancestor)
+ analysis_id = api.get_id(ancestor_analysis)
logger.info("Analysis {} is from an ancestor".format(analysis_id))
- cp = from_ancestor.aq_parent.manage_cutObjects(analysis_id)
+ cp = ancestor_analysis.aq_parent.manage_cutObjects(analysis_id)
instance.manage_pasteObjects(cp)
- return instance._getOb(analysis_id)
+ analyses.append(instance._getOb(analysis_id))
- # Does the analysis exists in a descendant?
+        # Does the analysis exist in descendants?
from_descendant = self.get_from_descendant(instance, service)
- if from_descendant:
- # The analysis already exists in a partition, keep it. The
- # analysis from current instance will be masked otherwise
- analysis_id = api.get_id(from_descendant)
- logger.info("Analysis {} is from a descendant".format(analysis_id))
- return from_descendant
-
- return None
+ analyses.extend(from_descendant)
+ return analyses
def get_analyses_from_descendants(self, instance):
"""Returns all the analyses from descendants
@@ -270,48 +306,39 @@ def get_analyses_from_descendants(self, instance):
return analyses
def get_from_instance(self, instance, service):
- """Returns an analysis for the given service from the instance
+ """Returns analyses for the given service from the instance
"""
service_uid = api.get_uid(service)
- for analysis in instance.objectValues("Analysis"):
- if analysis.getServiceUID() == service_uid:
- return analysis
- return None
+ analyses = instance.objectValues("Analysis")
+        # Filter the analyses for the same service. Note that a Sample can
+        # contain more than one analysis with the same keyword because of retests
+ return filter(lambda an: an.getServiceUID() == service_uid, analyses)
def get_from_ancestor(self, instance, service):
- """Returns an analysis for the given service from ancestors
+ """Returns analyses for the given service from ancestors
"""
ancestor = instance.getParentAnalysisRequest()
if not ancestor:
- return None
+ return []
- analysis = self.get_from_instance(ancestor, service)
- return analysis or self.get_from_ancestor(ancestor, service)
+ analyses = self.get_from_instance(ancestor, service)
+ return analyses or self.get_from_ancestor(ancestor, service)
def get_from_descendant(self, instance, service):
- """Returns an analysis for the given service from descendants
+ """Returns analyses for the given service from descendants
"""
+ analyses = []
for descendant in instance.getDescendants():
# Does the analysis exists in the current descendant?
- analysis = self.get_from_instance(descendant, service)
- if analysis:
- return analysis
+ descendant_analyses = self.get_from_instance(descendant, service)
+ if descendant_analyses:
+ analyses.extend(descendant_analyses)
# Search in descendants from current descendant
- analysis = self.get_from_descendant(descendant, service)
- if analysis:
- return analysis
-
- return None
+ from_descendant = self.get_from_descendant(descendant, service)
+ analyses.extend(from_descendant)
- def _get_services(self, full_objects=False):
- """Fetch and return analysis service objects
- """
- bsc = api.get_tool("bika_setup_catalog")
- brains = bsc(portal_type="AnalysisService")
- if full_objects:
- return map(api.get_object, brains)
- return brains
+ return analyses
def _to_service(self, thing):
"""Convert to Analysis Service
@@ -345,33 +372,6 @@ def _to_service(self, thing):
"The object will be dismissed.".format(portal_type))
return None
- def _update_specs(self, instance, specs):
- """Update AR specifications
-
- :param instance: Analysis Request
- :param specs: List of Specification Records
- """
-
- if specs is None:
- return
-
- # N.B. we copy the records here, otherwise the spec will be written to
- # the attached specification of this AR
- rr = {item["keyword"]: item.copy()
- for item in instance.getResultsRange()}
- for spec in specs:
- keyword = spec.get("keyword")
- if keyword in rr:
- # overwrite the instance specification only, if the specific
- # analysis spec has min/max values set
- if all([spec.get("min"), spec.get("max")]):
- rr[keyword].update(spec)
- else:
- rr[keyword] = spec
- else:
- rr[keyword] = spec
- return instance.setResultsRange(rr.values())
-
registerField(ARAnalysesField,
title="Analyses",
diff --git a/bika/lims/browser/fields/configure.zcml b/bika/lims/browser/fields/configure.zcml
index 9ebfc6645e..c2a8a090a9 100644
--- a/bika/lims/browser/fields/configure.zcml
+++ b/bika/lims/browser/fields/configure.zcml
@@ -1,7 +1,15 @@
+
+
+
diff --git a/bika/lims/browser/fields/resultrangefield.py b/bika/lims/browser/fields/resultrangefield.py
new file mode 100644
index 0000000000..446d2b317b
--- /dev/null
+++ b/bika/lims/browser/fields/resultrangefield.py
@@ -0,0 +1,110 @@
+# -*- coding: utf-8 -*-
+#
+# This file is part of SENAITE.CORE.
+#
+# SENAITE.CORE is free software: you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free Software
+# Foundation, version 2.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
+# details.
+#
+# You should have received a copy of the GNU General Public License along with
+# this program; if not, write to the Free Software Foundation, Inc., 51
+# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+#
+# Copyright 2018-2020 by it's authors.
+# Some rights reserved, see README and LICENSE.
+
+from operator import itemgetter
+
+from Products.ATExtensions.field import RecordField
+from Products.Archetypes.Registry import registerField
+from Products.Archetypes.interfaces import IFieldDefaultProvider
+from zope.interface import implements
+
+from bika.lims import bikaMessageFactory as _
+from bika.lims.interfaces.analysis import IRequestAnalysis
+
+
+# A tuple of (subfield_id, subfield_label,)
+SUB_FIELDS = (
+ ("keyword", _("Analysis Service")),
+ ("min_operator", _("Min operator")),
+ ("min", _('Min')),
+ ("max_operator", _("Max operator")),
+ ("max", _('Max')),
+ ("warn_min", _('Min warn')),
+ ("warn_max", _('Max warn')),
+ ("hidemin", _('< Min')),
+ ("hidemax", _('> Max')),
+ ("rangecomment", _('Range Comment')),
+)
+
+
+class ResultRangeField(RecordField):
+ """A field that stores a results range
+ """
+ _properties = RecordField._properties.copy()
+ _properties.update({
+ "type": "results_range_field",
+ "subfields": map(itemgetter(0), SUB_FIELDS),
+ "subfield_labels": dict(SUB_FIELDS),
+ })
+
+ def set(self, instance, value, **kwargs):
+ from bika.lims.content.analysisspec import ResultsRangeDict
+ if isinstance(value, ResultsRangeDict):
+ # Better store a built-in dict so it will always be available even
+ # if ResultsRangeDict is removed or changed
+ value = dict(value)
+
+ super(ResultRangeField, self).set(instance, value, **kwargs)
+
+ def get(self, instance, **kwargs):
+ from bika.lims.content.analysisspec import ResultsRangeDict
+ value = super(ResultRangeField, self).get(instance, **kwargs)
+ if value:
+ return ResultsRangeDict(dict(value.items()))
+ return {}
+
+
+registerField(ResultRangeField, title="ResultRange",
+ description="Used for storing a result range",)
+
+
+class DefaultResultsRangeProvider(object):
+ """Default Results Range provider for analyses
+ This is used for backwards-compatibility for when the analysis' ResultsRange
+ was obtained directly from Sample's ResultsRanges field, before this:
+ https://github.com/senaite/senaite.core/pull/1506
+ """
+ implements(IFieldDefaultProvider)
+
+ def __init__(self, context):
+ self.context = context
+
+ def __call__(self):
+ """Get the default value.
+ """
+ if not IRequestAnalysis.providedBy(self.context):
+ return {}
+
+ # Get the AnalysisRequest to look at
+ analysis = self.context
+ sample = analysis.getRequest()
+ if not sample:
+ return {}
+
+ # Search by keyword
+ field = sample.getField("ResultsRange")
+ keyword = analysis.getKeyword()
+ rr = field.get(sample, search_by=keyword)
+ if rr:
+ return rr
+
+ # Try with uid (this shouldn't be necessary)
+ service_uid = analysis.getServiceUID()
+ return field.get(sample, search_by=service_uid) or {}
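
A usage sketch of the default provider (registration of the `IFieldDefaultProvider` adapter is done elsewhere and is not part of this hunk); `analysis` is assumed to be an `IRequestAnalysis` from a Sample with a Specification:

```python
# Sketch only: what the provider yields for a given analysis
provider = DefaultResultsRangeProvider(analysis)
default_rr = provider()  # Sample's range for this keyword/service uid, or {}

# Archetypes is expected to fall back to this value whenever the analysis has
# no ResultsRange of its own, e.g. analyses created before this change.
```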
diff --git a/bika/lims/browser/fields/resultsrangesfield.py b/bika/lims/browser/fields/resultsrangesfield.py
new file mode 100644
index 0000000000..3e429274f4
--- /dev/null
+++ b/bika/lims/browser/fields/resultsrangesfield.py
@@ -0,0 +1,108 @@
+# -*- coding: utf-8 -*-
+#
+# This file is part of SENAITE.CORE.
+#
+# SENAITE.CORE is free software: you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free Software
+# Foundation, version 2.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
+# details.
+#
+# You should have received a copy of the GNU General Public License along with
+# this program; if not, write to the Free Software Foundation, Inc., 51
+# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+#
+# Copyright 2018-2020 by it's authors.
+# Some rights reserved, see README and LICENSE.
+
+from operator import itemgetter
+
+from Products.ATExtensions.field import RecordsField
+from Products.Archetypes.Registry import registerField
+
+from bika.lims import api
+from bika.lims.browser.fields.resultrangefield import SUB_FIELDS
+from bika.lims.browser.widgets import AnalysisSpecificationWidget
+from bika.lims.catalog import SETUP_CATALOG
+
+
+class ResultsRangesField(RecordsField):
+ """A field that stores a list of results ranges
+ """
+ _properties = RecordsField._properties.copy()
+ _properties.update({
+ "type": "specifications",
+ "subfields": map(itemgetter(0), SUB_FIELDS),
+ "subfield_labels": dict(SUB_FIELDS),
+ "subfield_validators": {
+ "min": "analysisspecs_validator",
+ "max": "analysisspecs_validator",
+ },
+ "required_subfields": ("keyword", ),
+ "widget": AnalysisSpecificationWidget,
+ })
+
+ def get(self, instance, **kwargs):
+ values = super(ResultsRangesField, self).get(instance, **kwargs)
+
+        # If a keyword or a uid has been specified, return the result range
+        # for that uid or keyword only
+ if "search_by" in kwargs:
+ uid_or_keyword = kwargs.get("search_by")
+ if uid_or_keyword:
+ return self.getResultRange(values, uid_or_keyword) or {}
+ return {}
+
+        # Convert the dict items to ResultsRangeDict for easy handling
+ from bika.lims.content.analysisspec import ResultsRangeDict
+ return map(lambda val: ResultsRangeDict(dict(val.items())), values)
+
+ def getResultRange(self, values, uid_keyword_service):
+ if not uid_keyword_service:
+ return None
+
+ if api.is_object(uid_keyword_service):
+ uid_keyword_service = api.get_uid(uid_keyword_service)
+
+ key = "keyword"
+ if api.is_uid(uid_keyword_service) and uid_keyword_service != "0":
+ # We always assume a uid of "0" refers to portal
+ key = "uid"
+
+ # Find out the item for the given uid/keyword
+ from bika.lims.content.analysisspec import ResultsRangeDict
+ value = filter(lambda v: v.get(key) == uid_keyword_service, values)
+ return value and ResultsRangeDict(dict(value[0].items())) or None
+
+ def _to_dict(self, value):
+ """Convert the records to persistent dictionaries
+ """
+        # Resolve items to guarantee they all have the uid key
+ value = super(ResultsRangesField, self)._to_dict(value)
+ return map(self.resolve_uid, value)
+
+ def resolve_uid(self, raw_dict):
+        """Returns a copy of the raw dictionary passed in, but with the
+        additional key "uid". Its value is inferred from "keyword" if present
+ """
+ value = raw_dict.copy()
+ uid = value.get("uid")
+ if api.is_uid(uid) and uid != "0":
+ return value
+
+        # uid key does not exist or is not valid, try to infer it from the keyword
+ keyword = value.get("keyword")
+ if keyword:
+ query = dict(portal_type="AnalysisService", getKeyword=keyword)
+ brains = api.search(query, SETUP_CATALOG)
+ if len(brains) == 1:
+ uid = api.get_uid(brains[0])
+ value["uid"] = uid
+ return value
+
+
+registerField(ResultsRangesField, title="ResultsRanges",
+              description="Used for storing a list of results ranges",)
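
A usage sketch of the new `search_by` lookup; `sample` is assumed to be an AnalysisRequest whose ResultsRange field uses this type, and the keyword/uid values are made up:

```python
field = sample.getField("ResultsRange")

all_ranges = field.get(sample)                       # list of ResultsRangeDict
ca_range = field.get(sample, search_by="Ca")         # lookup by keyword
by_uid = field.get(sample, search_by=service_uid)    # lookup by service uid
missing = field.get(sample, search_by="MissingKw")   # {} when nothing matches
```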
diff --git a/bika/lims/browser/header_table.py b/bika/lims/browser/header_table.py
index da66866834..177ba54902 100644
--- a/bika/lims/browser/header_table.py
+++ b/bika/lims/browser/header_table.py
@@ -24,6 +24,7 @@
from bika.lims import logger
from bika.lims.api.security import check_permission
from bika.lims.browser import BrowserView
+from bika.lims.interfaces import IAnalysisRequestWithPartitions
from bika.lims.interfaces import IHeaderTableFieldRenderer
from bika.lims.utils import t
from plone.memoize import view as viewcache
@@ -82,6 +83,12 @@ def __call__(self):
self.context.plone_utils.addPortalMessage(message, "info")
return self.template()
+ @viewcache.memoize
+ def is_primary_with_partitions(self):
+ """Check if the Sample is a primary with partitions
+ """
+ return IAnalysisRequestWithPartitions.providedBy(self.context)
+
@viewcache.memoize
def is_edit_allowed(self):
"""Check permission 'ModifyPortalContent' on the context
diff --git a/bika/lims/browser/partition_magic.py b/bika/lims/browser/partition_magic.py
index bb829d8829..f55baf715e 100644
--- a/bika/lims/browser/partition_magic.py
+++ b/bika/lims/browser/partition_magic.py
@@ -86,8 +86,6 @@ def __call__(self):
# The creation of partitions w/o analyses is allowed. Maybe the
# user wants to add the analyses later manually or wants to keep
# this partition stored in a freezer for some time
- # Note we set "remove_primary_analyses" to False cause we want
- # user to be able to add same analyses to different partitions.
analyses_uids = partition.get("analyses", [])
partition = create_partition(
request=self.request,
@@ -96,7 +94,6 @@ def __call__(self):
container=container_uid,
preservation=preservation_uid,
analyses=analyses_uids,
- remove_primary_analyses=False,
internal_use=internal_use,
)
partitions.append(partition)
@@ -112,9 +109,6 @@ def __call__(self):
# If no partitions were created, show a warning message
return self.redirect(message=_("No partitions were created"))
- # Remove analyses from primary Analysis Requests
- self.remove_primary_analyses()
-
message = _("Created {} partitions: {}".format(
len(partitions), ", ".join(map(api.get_title, partitions))))
return self.redirect(message=message)
@@ -133,14 +127,6 @@ def push_primary_analyses_for_removal(self, analysis_request, analyses):
to_remove.extend(analyses)
self.analyses_to_remove[analysis_request] = list(set(to_remove))
- def remove_primary_analyses(self):
- """Remove analyses relocated to partitions
- """
- for ar, analyses in self.analyses_to_remove.items():
- analyses_ids = list(set(map(api.get_id, analyses)))
- ar.manage_delObjects(analyses_ids)
- self.analyses_to_remove = dict()
-
def get_ar_data(self):
"""Returns a list of AR data
"""
diff --git a/bika/lims/browser/templates/header_table.pt b/bika/lims/browser/templates/header_table.pt
index 22f48884db..d33e5ff89f 100644
--- a/bika/lims/browser/templates/header_table.pt
+++ b/bika/lims/browser/templates/header_table.pt
@@ -12,7 +12,8 @@
@@ -21,22 +22,34 @@