From 8ac1da04719e61c90bd6fc0e54715a0d5fbb7676 Mon Sep 17 00:00:00 2001 From: Ramon Bartl Date: Wed, 15 Jan 2020 22:05:53 +0100 Subject: [PATCH 01/22] Added cleanup migration step --- bika/lims/upgrade/v01_03_003.py | 67 +++++++++++++++++++++++++++++++++ 1 file changed, 67 insertions(+) diff --git a/bika/lims/upgrade/v01_03_003.py b/bika/lims/upgrade/v01_03_003.py index 90ac9fbf04..ebc013e17e 100644 --- a/bika/lims/upgrade/v01_03_003.py +++ b/bika/lims/upgrade/v01_03_003.py @@ -239,6 +239,9 @@ def upgrade(tool): # -------- ADD YOUR STUFF BELOW -------- + # https://github.com/senaite/senaite.core/issues/1504 + remove_cascaded_analyses_of_root_samples(portal) + # Add additional JavaScripts to registry setup.runImportStepFromProfile(profile, "jsregistry") @@ -263,6 +266,70 @@ def upgrade(tool): return True +def remove_cascaded_analyses_of_root_samples(portal): + """Removes Analyses from Root Samples that belong to Partitions + + https://github.com/senaite/senaite.core/issues/1504 + """ + logger.info("Removing cascaded analyses from Root Samples...") + + # Query all root Samples + query = { + "isRootAncestor": True, + "sort_on": "created", + "sort_order": "ascending", + } + root_samples = api.search(query, "bika_catalog_analysisrequest_listing") + total = len(root_samples) + logger.info("{} Samples to check... ".format(total)) + + to_clean = [] + + for num, brain in enumerate(root_samples): + logger.debug("Checking Root Sample {}/{}".format(num+1, total)) + + # No Partitions, continue... 
+ if not brain.getDescendantsUIDs: + continue + + # get the root sample + root_sample = api.get_object(brain) + # get the contained analyses of the root sample + root_analyses = root_sample.objectIds(spec=["Analysis"]) + + analyses_to_remove = [] + + # check if a root analysis is located as well in one of the partitions + for partition in root_sample.getDescendants(): + # get the contained analyses of the partition + part_analyses = partition.objectIds(spec=["Analysis"]) + # filter analyses that cascade root analyses + cascaded = filter(lambda an: an in root_analyses, part_analyses) + # Some of the partition analyses cascade the root analyses + if cascaded: + # remember IDs to be removed from the root sample + analyses_to_remove.extend(cascaded) + logger.debug( + "Sample {} contains cascaded Analyses of Partition {}: {}" + .format(api.get_id(root_sample), + api.get_id(partition), + cascaded)) + + if analyses_to_remove: + # append to cleanup list + to_clean.append((root_sample, analyses_to_remove)) + + if to_clean: + logger.info("Found {} Root Samples that contain cascaded Analyses" + .format(len(to_clean))) + + # Uncomment before flight + # for sample, analyses in to_clean: + # sample.manage_delObjects(analyses) + + logger.info("Removing cascaded analyses from Root Samples... 
[DONE]") + + def reindex_client_fields(portal): logger.info("Reindexing client fields ...") fields_to_reindex = [ From 5c6dd12e58facbc9da7b4690fa62cc088c087a9b Mon Sep 17 00:00:00 2001 From: Ramon Bartl Date: Wed, 15 Jan 2020 22:37:08 +0100 Subject: [PATCH 02/22] Bypass permission check in migration --- bika/lims/upgrade/v01_03_003.py | 20 +++++++++++++++----- 1 file changed, 15 insertions(+), 5 deletions(-) diff --git a/bika/lims/upgrade/v01_03_003.py b/bika/lims/upgrade/v01_03_003.py index ebc013e17e..6e2f884c0b 100644 --- a/bika/lims/upgrade/v01_03_003.py +++ b/bika/lims/upgrade/v01_03_003.py @@ -323,11 +323,21 @@ def remove_cascaded_analyses_of_root_samples(portal): logger.info("Found {} Root Samples that contain cascaded Analyses" .format(len(to_clean))) - # Uncomment before flight - # for sample, analyses in to_clean: - # sample.manage_delObjects(analyses) - - logger.info("Removing cascaded analyses from Root Samples... [DONE]") + for sample, analyses in to_clean: + sid = api.get_id(sample) + for analysis in analyses: + an = sample[analysis] + state = api.get_workflow_status_of(an) + if state != "unassigned": + # XXX What to do when assigned, rejected ... ? + pass + logger.info("Deleting Analysis '{}' in State '{}' from '{}'" + .format(analysis, state, sid)) + # Uncomment before flight + # sample._delObject(analysis) + + logger.info("Removing cascaded analyses from {} Root Samples... 
[DONE]" + .format(len(to_clean))) def reindex_client_fields(portal): From 20b7a405482010bcf993da118470ad5cd9385351 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jordi=20Puiggen=C3=A9?= Date: Wed, 15 Jan 2020 23:44:20 +0100 Subject: [PATCH 03/22] Consider ancestors and partitions when adding analyses to a Sample --- bika/lims/browser/fields/aranalysesfield.py | 113 ++++++++++++++++---- 1 file changed, 91 insertions(+), 22 deletions(-) diff --git a/bika/lims/browser/fields/aranalysesfield.py b/bika/lims/browser/fields/aranalysesfield.py index df37ed8623..9c7ca73f74 100644 --- a/bika/lims/browser/fields/aranalysesfield.py +++ b/bika/lims/browser/fields/aranalysesfield.py @@ -29,6 +29,7 @@ from bika.lims.interfaces import IAnalysis, ISubmitted from bika.lims.interfaces import IAnalysisService from bika.lims.interfaces import IARAnalysesField +from bika.lims.interfaces import IBaseAnalysis from bika.lims.permissions import AddAnalysis from bika.lims.utils.analysis import create_analysis from Products.Archetypes.public import Field @@ -99,9 +100,6 @@ def set(self, instance, items, prices=None, specs=None, hidden=None, **kw): :type hidden: list :returns: list of new assigned Analyses """ - # This setter returns a list of new set Analyses - new_analyses = [] - # Current assigned analyses analyses = instance.objectValues("Analysis") @@ -114,7 +112,7 @@ def set(self, instance, items, prices=None, specs=None, hidden=None, **kw): # Therefore, their UIDs are not included in the submitted UIDs. 
if not items and not submitted: logger.warn("Not allowed to remove all Analyses from AR.") - return new_analyses + return [] # Bail out if the items is not a list type if not isinstance(items, (list, tuple)): @@ -156,24 +154,10 @@ def set(self, instance, items, prices=None, specs=None, hidden=None, **kw): if prices is None: prices = dict() - # CREATE/MODIFY ANALYSES - - for service in services: - service_uid = api.get_uid(service) - keyword = service.getKeyword() - - # Create the Analysis if it doesn't exist - if shasattr(instance, keyword): - analysis = instance._getOb(keyword) - else: - analysis = create_analysis(instance, service) - new_analyses.append(analysis) - - # set the hidden status - analysis.setHidden(hidden.get(service_uid, False)) - - # Set the price of the Analysis - analysis.setPrice(prices.get(service_uid, service.getPrice())) + # Add analyses + new_analyses = map(lambda service: + self.add_analysis(instance, service, prices, hidden), + services) # DELETE ANALYSES @@ -224,6 +208,91 @@ def set(self, instance, items, prices=None, specs=None, hidden=None, **kw): return new_analyses + def add_analysis(self, instance, service, prices, hidden): + service_uid = api.get_uid(service) + + # Gets the analysis or creates the analysis for this service + # Note this analysis might not belong to this current instance, but + # from a descendant (partition) + analysis = self.resolve_analysis(instance, service) + + # Set the hidden status + analysis.setHidden(hidden.get(service_uid, False)) + + # Set the price of the Analysis + analysis.setPrice(prices.get(service_uid, service.getPrice())) + return analysis + + def resolve_analysis(self, instance, service): + """Resolves an analysis for the service and instance + """ + # Does the analysis exists in this instance already? 
+ analysis = self.get_from_instance(instance, service) + if analysis: + keyword = service.getKeyword() + logger.info("Analysis for '{}' already exists".format(keyword)) + return analysis + + # Does the analysis exists in an ancestor? + from_ancestor = self.get_from_ancestor(instance, service) + if from_ancestor: + # Move the analysis into this instance. The ancestor's + # analysis will be masked otherwise + analysis_id = api.get_id(from_ancestor) + logger.info("Analysis {} is from an ancestor".format(analysis_id)) + cp = from_ancestor.aq_parent.manage_cutObjects(analysis_id) + instance.manage_pasteObjects(cp) + return instance._getOb(analysis_id) + + # Does the analysis exists in a descendant? + from_descendant = self.get_from_descendant(instance, service) + if from_descendant: + # The analysis already exists in a partition, keep it. The + # analysis from current instance will be masked otherwise + analysis_id = api.get_id(from_descendant) + logger.info("Analysis {} is from a descendant".format(analysis_id)) + return from_descendant + + # Create the analysis + logger.info("Creating new analysis '{}'".format(service.getKeyword())) + return create_analysis(instance, service) + + def get_from_instance(self, instance, service): + """Returns an analysis for the given service from the instance + """ + # Note we filter by keyword cause services are history-aware + keyword = service.getKeyword() + for analysis in instance.objectValues("Analysis"): + if analysis.getKeyword() == keyword: + return analysis + return None + + def get_from_ancestor(self, instance, service): + """Returns an analysis for the given service from ancestors + """ + ancestor = instance.getParentAnalysisRequest() + if not ancestor: + return None + + analysis = self.get_from_instance(ancestor, service) + return analysis or self.get_from_ancestor(ancestor, service) + + def get_from_descendant(self, instance, service): + """Returns an analysis for the given service from descendants + """ + for descendant 
in instance.getDescendants(): + # Does the analysis exists in the current descendant? + analysis = self.get_from_instance(descendant, service) + if analysis: + return analysis + + # Search in descendants from current descendant + analysis = self.get_from_descendant(descendant, service) + if analysis: + return analysis + + return None + def _get_services(self, full_objects=False): """Fetch and return analysis service objects """ From 013dd6c87256ff05caad82ebc8d1e823cbadd597 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jordi=20Puiggen=C3=A9?= Date: Thu, 16 Jan 2020 00:26:14 +0100 Subject: [PATCH 04/22] Take analyses from partitions when removing analyses from a Sample --- bika/lims/browser/fields/aranalysesfield.py | 32 +++++++++++++-------- 1 file changed, 20 insertions(+), 12 deletions(-) diff --git a/bika/lims/browser/fields/aranalysesfield.py b/bika/lims/browser/fields/aranalysesfield.py index 9c7ca73f74..d6a6279067 100644 --- a/bika/lims/browser/fields/aranalysesfield.py +++ b/bika/lims/browser/fields/aranalysesfield.py @@ -155,18 +155,21 @@ def set(self, instance, items, prices=None, specs=None, hidden=None, **kw): prices = dict() # Add analyses + # The returned analyses can contain either newly created analyses or + # analyses from partitions and/or ancestors new_analyses = map(lambda service: self.add_analysis(instance, service, prices, hidden), services) - # DELETE ANALYSES + # Remove analyses + # Since Manage Analyses view displays the analyses from partitions, we + # also need to take them into consideration here. 
Analyses from + # ancestors can be omitted + analyses.extend(self.get_analyses_from_descendants(instance)) # Service UIDs service_uids = map(api.get_uid, services) - # Analyses IDs to delete - delete_ids = [] - # Assigned Attachments assigned_attachments = [] @@ -191,11 +194,9 @@ def set(self, instance, items, prices=None, specs=None, hidden=None, **kw): if worksheet: worksheet.removeAnalysis(analysis) - delete_ids.append(analysis.getId()) - - if delete_ids: - # Note: subscriber might promote the AR - instance.manage_delObjects(ids=delete_ids) + # Remove the analysis + # Note the analysis might belong to a partition + analysis.aq_parent.manage_delObjects(ids=[api.get_id(analysis)]) # Remove orphaned attachments for attachment in assigned_attachments: @@ -257,13 +258,20 @@ def resolve_analysis(self, instance, service): logger.info("Creating new analysis '{}'".format(service.getKeyword())) return create_analysis(instance, service) + def get_analyses_from_descendants(self, instance): + """Returns all the analyses from descendants + """ + analyses = [] + for descendant in instance.getDescendants(all_descendants=True): + analyses.extend(descendant.objectValues("Analysis")) + return analyses + def get_from_instance(self, instance, service): """Returns an analysis for the given service from the instance """ - # Note we filter by keyword cause services are history-aware - keyword = service.getKeyword() + service_uid = api.get_uid(service) for analysis in instance.objectValues("Analysis"): - if analysis.getKeyword() == keyword: + if analysis.getServiceUID() == service_uid: return analysis return None From 500ae96fb67f7a7b44c19f441375c92581c24b60 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jordi=20Puiggen=C3=A9?= Date: Thu, 16 Jan 2020 00:32:19 +0100 Subject: [PATCH 05/22] Ensure submitted analyses from partitions cannot be removed --- bika/lims/browser/fields/aranalysesfield.py | 13 +------------ 1 file changed, 1 insertion(+), 12 deletions(-) diff --git 
a/bika/lims/browser/fields/aranalysesfield.py b/bika/lims/browser/fields/aranalysesfield.py index d6a6279067..5e9cfe5236 100644 --- a/bika/lims/browser/fields/aranalysesfield.py +++ b/bika/lims/browser/fields/aranalysesfield.py @@ -103,17 +103,6 @@ def set(self, instance, items, prices=None, specs=None, hidden=None, **kw): # Current assigned analyses analyses = instance.objectValues("Analysis") - # Submitted analyses must be retained - submitted = filter(lambda an: ISubmitted.providedBy(an), analyses) - - # Prevent removing all analyses - # - # N.B.: Submitted analyses are rendered disabled in the HTML form. - # Therefore, their UIDs are not included in the submitted UIDs. - if not items and not submitted: - logger.warn("Not allowed to remove all Analyses from AR.") - return [] - # Bail out if the items is not a list type if not isinstance(items, (list, tuple)): raise TypeError( @@ -181,7 +170,7 @@ def set(self, instance, items, prices=None, specs=None, hidden=None, **kw): continue # Skip non-open Analyses - if analysis in submitted: + if ISubmitted.providedBy(analysis): continue # Remember assigned attachments From e2ad45465105ef90a98abfacbd53e4d1f972382a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jordi=20Puiggen=C3=A9?= Date: Thu, 16 Jan 2020 00:41:50 +0100 Subject: [PATCH 06/22] Cleanup imports --- bika/lims/browser/fields/aranalysesfield.py | 24 ++++++++++----------- 1 file changed, 11 insertions(+), 13 deletions(-) diff --git a/bika/lims/browser/fields/aranalysesfield.py b/bika/lims/browser/fields/aranalysesfield.py index 5e9cfe5236..0e89062b10 100644 --- a/bika/lims/browser/fields/aranalysesfield.py +++ b/bika/lims/browser/fields/aranalysesfield.py @@ -22,22 +22,22 @@ from AccessControl import ClassSecurityInfo from AccessControl import Unauthorized +from Products.Archetypes.Registry import registerField +from Products.Archetypes.public import Field +from Products.Archetypes.public import ObjectField +from Products.CMFCore.utils import getToolByName +from 
zope.interface import implements + from bika.lims import api from bika.lims import logger from bika.lims.api.security import check_permission from bika.lims.catalog import CATALOG_ANALYSIS_LISTING -from bika.lims.interfaces import IAnalysis, ISubmitted -from bika.lims.interfaces import IAnalysisService from bika.lims.interfaces import IARAnalysesField -from bika.lims.interfaces import IBaseAnalysis +from bika.lims.interfaces import IAnalysis +from bika.lims.interfaces import IAnalysisService +from bika.lims.interfaces import ISubmitted from bika.lims.permissions import AddAnalysis from bika.lims.utils.analysis import create_analysis -from Products.Archetypes.public import Field -from Products.Archetypes.public import ObjectField -from Products.Archetypes.Registry import registerField -from Products.Archetypes.utils import shasattr -from Products.CMFCore.utils import getToolByName -from zope.interface import implements """Field to manage Analyses on ARs @@ -100,9 +100,6 @@ def set(self, instance, items, prices=None, specs=None, hidden=None, **kw): :type hidden: list :returns: list of new assigned Analyses """ - # Current assigned analyses - analyses = instance.objectValues("Analysis") - # Bail out if the items is not a list type if not isinstance(items, (list, tuple)): raise TypeError( @@ -153,7 +150,8 @@ def set(self, instance, items, prices=None, specs=None, hidden=None, **kw): # Remove analyses # Since Manage Analyses view displays the analyses from partitions, we # also need to take them into consideration here. Analyses from - # ancestors can be omitted + # ancestors can be omitted. 
+ analyses = instance.objectValues("Analysis") analyses.extend(self.get_analyses_from_descendants(instance)) # Service UIDs From 1fbafd5192342f11c656b4d69ca4601444a910f2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jordi=20Puiggen=C3=A9?= Date: Thu, 16 Jan 2020 00:50:31 +0100 Subject: [PATCH 07/22] Only return the analyses that have been created --- bika/lims/browser/fields/aranalysesfield.py | 19 +++++++++++++++---- 1 file changed, 15 insertions(+), 4 deletions(-) diff --git a/bika/lims/browser/fields/aranalysesfield.py b/bika/lims/browser/fields/aranalysesfield.py index 0e89062b10..4516da8757 100644 --- a/bika/lims/browser/fields/aranalysesfield.py +++ b/bika/lims/browser/fields/aranalysesfield.py @@ -146,6 +146,7 @@ def set(self, instance, items, prices=None, specs=None, hidden=None, **kw): new_analyses = map(lambda service: self.add_analysis(instance, service, prices, hidden), services) + new_analyses = filter(None, new_analyses) # Remove analyses # Since Manage Analyses view displays the analyses from partitions, we @@ -198,18 +199,30 @@ def set(self, instance, items, prices=None, specs=None, hidden=None, **kw): def add_analysis(self, instance, service, prices, hidden): service_uid = api.get_uid(service) + new_analysis = False # Gets the analysis or creates the analysis for this service # Note this analysis might not belong to this current instance, but # from a descendant (partition) analysis = self.resolve_analysis(instance, service) + if not analysis: + # Create the analysis + new_analysis = True + keyword = service.getKeyword() + logger.info("Creating new analysis '{}'".format(keyword)) + analysis = create_analysis(instance, service) # Set the hidden status analysis.setHidden(hidden.get(service_uid, False)) # Set the price of the Analysis analysis.setPrice(prices.get(service_uid, service.getPrice())) - return analysis + + # Only return the analysis if is a new one + if new_analysis: + return analysis + + return None def resolve_analysis(self, instance, 
service): """Resolves an analysis for the service and instance @@ -241,9 +254,7 @@ def resolve_analysis(self, instance, service): logger.info("Analysis {} is from a descendant".format(analysis_id)) return from_descendant - # Create the analysis - logger.info("Creating new analysis '{}'".format(service.getKeyword())) - return create_analysis(instance, service) + return None def get_analyses_from_descendants(self, instance): """Returns all the analyses from descendants From a4e9a46885acfa2c935184b4789c260b2b88d631 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jordi=20Puiggen=C3=A9?= Date: Thu, 16 Jan 2020 00:58:10 +0100 Subject: [PATCH 08/22] Fix test --- bika/lims/tests/doctests/ARAnalysesField.rst | 19 +------------------ 1 file changed, 1 insertion(+), 18 deletions(-) diff --git a/bika/lims/tests/doctests/ARAnalysesField.rst b/bika/lims/tests/doctests/ARAnalysesField.rst index 66b7989faf..10b19182e7 100644 --- a/bika/lims/tests/doctests/ARAnalysesField.rst +++ b/bika/lims/tests/doctests/ARAnalysesField.rst @@ -269,33 +269,16 @@ We expect to have just the `PH` Analysis again: >>> ar.objectValues("Analysis") [] -Removing all Analyses is prevented, because it can not be empty: - - >>> new_analyses = field.set(ar, []) - >>> ar.objectValues("Analysis") - [] - The field can also handle UIDs of Analyses Services: >>> service_uids = map(api.get_uid, all_services) >>> new_analyses = field.set(ar, service_uids) -We expect again to have the `CA` and `MG` Analyses as well: - - >>> sorted(new_analyses, key=methodcaller('getId')) - [, ] - -And all the three Analyses in total: +We expect again to have all the three Analyses: >>> sorted(ar.objectValues("Analysis"), key=methodcaller("getId")) [, , ] -Set again only the `PH` Analysis: - - >>> new_analyses = field.set(ar, [analysisservice1]) - >>> ar.objectValues("Analysis") - [] - The field should also handle catalog brains: >>> brains = api.search({"portal_type": "AnalysisService", "getKeyword": "CA"}) From 
7723fcc5c56904cfb8a12d0ce7cccf11110c8bf4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jordi=20Puiggen=C3=A9?= Date: Thu, 16 Jan 2020 01:00:41 +0100 Subject: [PATCH 09/22] Remove obsolete comment --- bika/lims/browser/fields/aranalysesfield.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/bika/lims/browser/fields/aranalysesfield.py b/bika/lims/browser/fields/aranalysesfield.py index 4516da8757..10c6afb2cc 100644 --- a/bika/lims/browser/fields/aranalysesfield.py +++ b/bika/lims/browser/fields/aranalysesfield.py @@ -141,8 +141,6 @@ def set(self, instance, items, prices=None, specs=None, hidden=None, **kw): prices = dict() # Add analyses - # The returned analyses can contain either newly created analyses or - # analyses from partitions and/or ancestors new_analyses = map(lambda service: self.add_analysis(instance, service, prices, hidden), services) From fa5844ac269395faa1655d2c5c1a54875cc19cc5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jordi=20Puiggen=C3=A9?= Date: Thu, 16 Jan 2020 10:34:19 +0100 Subject: [PATCH 10/22] New doctest for functions from ARAnalysesField --- .../ARAnalysesFieldWithPartitions.rst | 289 ++++++++++++++++++ 1 file changed, 289 insertions(+) create mode 100644 bika/lims/tests/doctests/ARAnalysesFieldWithPartitions.rst diff --git a/bika/lims/tests/doctests/ARAnalysesFieldWithPartitions.rst b/bika/lims/tests/doctests/ARAnalysesFieldWithPartitions.rst new file mode 100644 index 0000000000..4d27cdc4fe --- /dev/null +++ b/bika/lims/tests/doctests/ARAnalysesFieldWithPartitions.rst @@ -0,0 +1,289 @@ +AR Analyses Field when using Partitions +======================================= + +The setter of the ARAnalysesField takes descendants (partitions) and ancestors +from the current instance into account to prevent inconsistencies: In a Sample +lineage analyses from a node are always masked by same analyses in leaves. This +can drive to inconsistencies and therefore, there is the need to keep the tree +without duplicates. 
+ +Running this test from the buildout directory: + + bin/test test_textual_doctests -t ARAnalysesFieldWithPartitions + +Test Setup +---------- + +Needed imports: + + >>> import transaction + >>> from DateTime import DateTime + >>> from plone.app.testing import setRoles + >>> from plone.app.testing import TEST_USER_ID + >>> from plone.app.testing import TEST_USER_PASSWORD + >>> from bika.lims import api + >>> from bika.lims.utils.analysisrequest import create_analysisrequest + >>> from bika.lims.utils.analysisrequest import create_partition + >>> from bika.lims.workflow import doActionFor as do_action_for + >>> from zope.interface import alsoProvides + >>> from zope.interface import noLongerProvides + +Functional Helpers: + + >>> def new_sample(services): + ... values = { + ... 'Client': client.UID(), + ... 'Contact': contact.UID(), + ... 'DateSampled': DateTime().strftime("%Y-%m-%d"), + ... 'SampleType': sampletype.UID()} + ... service_uids = map(api.get_uid, services) + ... ar = create_analysisrequest(client, request, values, service_uids) + ... transitioned = do_action_for(ar, "receive") + ... return ar + + >>> def get_analysis_from(sample, service): + ... service_uid = api.get_uid(service) + ... for analysis in sample.getAnalyses(full_objects=True): + ... if analysis.getServiceUID() == service_uid: + ... return analysis + ... 
return None + +Variables: + + >>> portal = self.portal + >>> request = self.request + >>> setup = api.get_setup() + +Create some basic objects for the test: + + >>> setRoles(portal, TEST_USER_ID, ['Manager',]) + >>> client = api.create(portal.clients, "Client", Name="Happy Hills", ClientID="HH", MemberDiscountApplies=True) + >>> contact = api.create(client, "Contact", Firstname="Rita", Lastname="Mohale") + >>> sampletype = api.create(setup.bika_sampletypes, "SampleType", title="Water", Prefix="W") + >>> labcontact = api.create(setup.bika_labcontacts, "LabContact", Firstname="Lab", Lastname="Manager") + >>> department = api.create(setup.bika_departments, "Department", title="Chemistry", Manager=labcontact) + >>> category = api.create(setup.bika_analysiscategories, "AnalysisCategory", title="Metals", Department=department) + >>> Cu = api.create(setup.bika_analysisservices, "AnalysisService", title="Copper", Keyword="Cu", Price="15", Category=category.UID(), Accredited=True) + >>> Fe = api.create(setup.bika_analysisservices, "AnalysisService", title="Iron", Keyword="Fe", Price="10", Category=category.UID()) + >>> Au = api.create(setup.bika_analysisservices, "AnalysisService", title="Gold", Keyword="Au", Price="20", Category=category.UID()) + + +Creation of a Sample with a Partition +------------------------------------- + +Create a Sample and receive: + + >>> sample = new_sample([Cu, Fe]) + +Create a Partition containing of the Sample, containing the analysis `Cu`: + + >>> cu = get_analysis_from(sample, Cu) + >>> partition = create_partition(sample, request, [cu]) + +The analysis 'Cu' lives in the partition: + + >>> cu = get_analysis_from(partition, Cu) + >>> api.get_parent(cu) == partition + True + +Although is also returned by the primary: + + >>> cu = get_analysis_from(sample, Cu) + >>> api.get_parent(cu) == partition + True + >>> api.get_parent(cu) == sample + False + + +Analyses retrieval +------------------ + +Get the ARAnalysesField to play with: + + >>> field 
= sample.getField("Analyses") + +get_from_instance +................. + +When asked for `Fe` when the primary is given, it returns the analysis, cause +it lives in the primary: + + >>> fe = field.get_from_instance(sample, Fe) + >>> fe.getServiceUID() == api.get_uid(Fe) + True + +But when asked for `Cu` when the primary is given, it returns None, cause it +lives in the partition: + + >>> cu = field.get_from_instance(sample, Cu) + >>> cu is None + True + +While it returns the analysis when the partition is used: + + >>> cu = field.get_from_instance(partition, Cu) + >>> cu.getServiceUID() == api.get_uid(Cu) + True + +But when asking the partition for `Fe` it returns None, cause it lives in the +ancestor: + + >>> fe = field.get_from_instance(partition, Fe) + >>> fe is None + True + +get_from_ancestor +................. + +When asked for `Fe` to primary, it returns None because there is no ancestor +containing `Fe`: + + >>> fe = field.get_from_ancestor(sample, Fe) + >>> fe is None + True + +But when asked for `Fe` to the partition, it returns the analysis, cause it +it lives in an ancestor from the partition: + + >>> fe = field.get_from_ancestor(partition, Fe) + >>> fe.getServiceUID() == api.get_uid(Fe) + True + +If I ask for `Cu`, that lives in the partition, it will return None for both: + + >>> cu = field.get_from_ancestor(sample, Cu) + >>> cu is None + True + + >>> cu = field.get_from_ancestor(partition, Cu) + >>> cu is None + True + +get_from_descendant +................... 
+ +When asked for `Fe` to primary, it returns None because there is no descendant +containing `Fe`: + + >>> fe = field.get_from_descendant(sample, Fe) + >>> fe is None + True + +And same with partition: + + >>> fe = field.get_from_descendant(partition, Fe) + >>> fe is None + True + +When asked for `Cu` to primary, it returns the analysis, because it lives in a +descendant (partition): + + >>> cu = field.get_from_descendant(sample, Cu) + >>> cu.getServiceUID() == api.get_uid(Cu) + True + +But returns None if I ask to the partition: + + >>> cu = field.get_from_descendant(partition, Cu) + >>> cu is None + True + +get_analyses_from_descendants +............................. + +It returns the analyses contained by the descendants: + + >>> field.get_analyses_from_descendants(sample) + [] + + >>> field.get_analyses_from_descendants(partition) + [] + + +Resolution of analyses from the Sample lineage +---------------------------------------------- + +resolve_analysis +................ + +Resolves the analysis from the sample lineage if exists: + + >>> fe = field.resolve_analysis(sample, Fe) + >>> fe.getServiceUID() == api.get_uid(Fe) + True + >>> fe.aq_parent == sample + True + + >>> cu = field.resolve_analysis(sample, Cu) + >>> cu.getServiceUID() == api.get_uid(Cu) + True + >>> cu.aq_parent == partition + True + + >>> au = field.resolve_analysis(sample, Au) + >>> au is None + True + +But when we use the partition and the analysis is found in an ancestor, it +moves the analysis into the partition: + + >>> fe = field.resolve_analysis(partition, Fe) + >>> fe.getServiceUID() == api.get_uid(Fe) + True + >>> fe.aq_parent == partition + True + >>> sample.objectValues("Analysis") + [] + >>> partition.objectValues("Analysis") + [, ] + + +Addition of analyses +-------------------- + +add_analysis +............ 
+ +Setup required parameters: + + >>> prices = hidden = dict() + +If we try to add now an analysis that already exists, either in the partition or +in the primary, the analysis won't be added: + + >>> added = field.add_analysis(sample, Fe, prices, hidden) + >>> added is None + True + >>> sample.objectValues("Analysis") + [] + + >>> added = field.add_analysis(partition, Fe, prices, hidden) + >>> added is None + True + >>> partition.objectValues("Analysis") + [, ] + +If we add a new analysis, this will be added in the sample we are working with: + + >>> au = field.add_analysis(sample, Au, prices, hidden) + >>> au.getServiceUID() == api.get_uid(Au) + True + >>> sample.objectValues("Analysis") + [] + >>> partition.objectValues("Analysis") + [, ] + +Apply the changes: + + >>> transaction.commit() + +If I try to add an analysis that exists in an ancestor, the analysis gets moved +while the function returns None: + + >>> added = field.add_analysis(partition, Au, prices, hidden) + >>> added is None + True + >>> sample.objectValues("Analysis") + [] + >>> partition.objectValues("Analysis") + [, , ] From 3d9d90e7ce77542aa1f70a2b03e49bdff232b797 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jordi=20Puiggen=C3=A9?= Date: Thu, 16 Jan 2020 12:54:05 +0100 Subject: [PATCH 11/22] Test setter of ARAnalysesField with partition --- .../ARAnalysesFieldWithPartitions.rst | 80 +++++++++++++++++++ 1 file changed, 80 insertions(+) diff --git a/bika/lims/tests/doctests/ARAnalysesFieldWithPartitions.rst b/bika/lims/tests/doctests/ARAnalysesFieldWithPartitions.rst index 4d27cdc4fe..c02f3c8b2e 100644 --- a/bika/lims/tests/doctests/ARAnalysesFieldWithPartitions.rst +++ b/bika/lims/tests/doctests/ARAnalysesFieldWithPartitions.rst @@ -66,6 +66,7 @@ Create some basic objects for the test: >>> Cu = api.create(setup.bika_analysisservices, "AnalysisService", title="Copper", Keyword="Cu", Price="15", Category=category.UID(), Accredited=True) >>> Fe = api.create(setup.bika_analysisservices, 
"AnalysisService", title="Iron", Keyword="Fe", Price="10", Category=category.UID()) >>> Au = api.create(setup.bika_analysisservices, "AnalysisService", title="Gold", Keyword="Au", Price="20", Category=category.UID()) + >>> Mg = api.create(setup.bika_analysisservices, "AnalysisService", title="Magnesium", Keyword="Mg", Price="20", Category=category.UID()) Creation of a Sample with a Partition @@ -287,3 +288,82 @@ while the function returns None: [] >>> partition.objectValues("Analysis") [, , ] + + +Set analyses +------------ + +If we try to set same analyses as before to the root sample, nothing happens +because the analyses are already there: + + >>> field.set(sample, [Cu, Fe, Au]) + [] + +The analyses still belong to the partition though: + + >>> sample.objectValues("Analysis") + [] + >>> partition.objectValues("Analysis") + [, , ] + +Same result if I set the analyses to the partition: + + >>> field.set(partition, [Cu, Fe, Au]) + [] + >>> sample.objectValues("Analysis") + [] + >>> partition.objectValues("Analysis") + [, , ] + +If I add a new analysis in the list, the analysis is successfully added: + + >>> field.set(sample, [Cu, Fe, Au, Mg]) + [] + >>> sample.objectValues("Analysis") + [] + +And the partition keeps its own analyses: + + >>> partition.objectValues("Analysis") + [, , ] + +Apply the changes: + + >>> transaction.commit() + +If I set the same analyses to the partition, I don't get any result: + + >>> field.set(partition, [Cu, Fe, Au, Mg]) + [] + +but, the `Mg` analysis has been moved into the partition: + + >>> sample.objectValues("Analysis") + [] + >>> partition.objectValues("Analysis") + [, , , ] + +To remove `Mg` analysis, pass the list without `Mg`: + + >>> field.set(sample, [Cu, Fe, Au]) + [] + +The analysis `Mg` has been removed, although it belonged to the partition: + + >>> sample.objectValues("Analysis") + [] + >>> partition.objectValues("Analysis") + [, , ] + +But if I add a new analysis to the primary and I try to remove it from the 
+partition, nothing will happen: + + >>> field.set(sample, [Cu, Fe, Au, Mg]) + [] + + >>> field.set(partition, [Cu, Fe, Au]) + [] + >>> sample.objectValues("Analysis") + [] + >>> partition.objectValues("Analysis") + [, , ] From 8a1fefc496c91b17d185545e6bdb04fce8166ae5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jordi=20Puiggen=C3=A9?= Date: Thu, 16 Jan 2020 18:39:49 +0100 Subject: [PATCH 12/22] Getter cleanup --- bika/lims/browser/fields/aranalysesfield.py | 22 ++++++++++++--------- 1 file changed, 13 insertions(+), 9 deletions(-) diff --git a/bika/lims/browser/fields/aranalysesfield.py b/bika/lims/browser/fields/aranalysesfield.py index 10c6afb2cc..8ae2481dab 100644 --- a/bika/lims/browser/fields/aranalysesfield.py +++ b/bika/lims/browser/fields/aranalysesfield.py @@ -25,7 +25,6 @@ from Products.Archetypes.Registry import registerField from Products.Archetypes.public import Field from Products.Archetypes.public import ObjectField -from Products.CMFCore.utils import getToolByName from zope.interface import implements from bika.lims import api @@ -73,16 +72,21 @@ def get(self, instance, **kwargs): :param kwargs: Keyword arguments to inject in the search query :returns: A list of Analysis Objects/Catalog Brains """ - catalog = getToolByName(instance, CATALOG_ANALYSIS_LISTING) - query = dict( - [(k, v) for k, v in kwargs.items() if k in catalog.indexes()]) + # Do we need to return objects or brains + full_objects = kwargs.get("full_objects", False) + + # Bail out parameters from kwargs that don't match with indexes + catalog = api.get_tool(CATALOG_ANALYSIS_LISTING) + indexes = catalog.indexes() + query = dict([(k, v) for k, v in kwargs.items() if k in indexes]) + + # Do the search against the catalog query["portal_type"] = "Analysis" query["getRequestUID"] = api.get_uid(instance) - analyses = catalog(query) - if not kwargs.get("full_objects", False): - return analyses - - return map(api.get_object, analyses) + brains = catalog(query) + if full_objects: + return 
map(api.get_object, brains) + return brains security.declarePrivate('set') From a986a2914231b0afe09c9c76d06f7e616f47ca35 Mon Sep 17 00:00:00 2001 From: Ramon Bartl Date: Thu, 16 Jan 2020 18:53:15 +0100 Subject: [PATCH 13/22] Updated migration step --- bika/lims/upgrade/v01_03_003.py | 100 ++++++++++++++++++++------------ 1 file changed, 64 insertions(+), 36 deletions(-) diff --git a/bika/lims/upgrade/v01_03_003.py b/bika/lims/upgrade/v01_03_003.py index 6e2f884c0b..36127680c4 100644 --- a/bika/lims/upgrade/v01_03_003.py +++ b/bika/lims/upgrade/v01_03_003.py @@ -18,15 +18,15 @@ # Copyright 2018-2019 by it's authors. # Some rights reserved, see README and LICENSE. -from Products.Archetypes.config import UID_CATALOG - from bika.lims import api from bika.lims import logger from bika.lims.catalog.bikasetup_catalog import SETUP_CATALOG from bika.lims.config import PROJECTNAME as product +from bika.lims.interfaces import ISubmitted from bika.lims.setuphandlers import setup_form_controller_actions from bika.lims.upgrade import upgradestep from bika.lims.upgrade.utils import UpgradeUtils +from Products.Archetypes.config import UID_CATALOG version = "1.3.3" # Remember version number in metadata.xml and setup.py profile = "profile-{0}:default".format(product) @@ -297,7 +297,8 @@ def remove_cascaded_analyses_of_root_samples(portal): # get the contained analyses of the root sample root_analyses = root_sample.objectIds(spec=["Analysis"]) - analyses_to_remove = [] + # Mapping of cascaded Analysis -> Partition + analysis_mapping = {} # check if a root analysis is located as well in one of the partitions for partition in root_sample.getDescendants(): @@ -305,39 +306,66 @@ def remove_cascaded_analyses_of_root_samples(portal): part_analyses = partition.objectIds(spec=["Analysis"]) # filter analyses that cascade root analyses cascaded = filter(lambda an: an in root_analyses, part_analyses) - # Some of the partition analyses cascade the root analyses - if cascaded: - # remember IDs to 
be removed from the root sample - analyses_to_remove.extend(cascaded) - logger.debug( - "Sample {} contains cascaded Analyses of Partition {}: {}" - .format(api.get_id(root_sample), - api.get_id(partition), - cascaded)) - - if analyses_to_remove: - # append to cleanup list - to_clean.append((root_sample, analyses_to_remove)) - - if to_clean: - logger.info("Found {} Root Samples that contain cascaded Analyses" - .format(len(to_clean))) - - for sample, analyses in to_clean: - sid = api.get_id(sample) - for analysis in analyses: - an = sample[analysis] - state = api.get_workflow_status_of(an) - if state != "unassigned": - # XXX What to do when assigned, rejected ... ? - pass - logger.info("Deleting Analysis '{}' in State '{}' from '{}'" - .format(analysis, state, sid)) - # Uncomment before flight - # sample._delObject(analysis) - - logger.info("Removing cascaded analyses from {} Root Samples... [DONE]" - .format(len(to_clean))) + # keep a mapping of analysis -> partition + for analysis in cascaded: + analysis_mapping[analysis] = partition + + if analysis_mapping: + to_clean.append((root_sample, analysis_mapping)) + + # cleanup cascaded analyses + # mapping maps the analysis id -> partition + for sample, mapping in to_clean: + + # go through the cascaded analyses and decide if the cascaded analysis + # should be removed from (a) the root sample or (b) the partition. 
+ + for analysis_id, partition in mapping.items(): + + # analysis from the root sample + root_an = sample[analysis_id] + # WF state from the root sample analysis + root_an_state = api.get_workflow_status_of(root_an) + + # analysis from the partition sample + part_an = partition[analysis_id] + # WF state from the partition sample analysis + part_an_state = api.get_workflow_status_of(part_an) + + # both analyses have the same WF state + if root_an_state == part_an_state: + # -> remove the analysis from the root sample + sample._delObject(analysis_id) + logger.info( + "Remove analysis '{}' in state '{}' from sample {}: {}" + .format(analysis_id, root_an_state, + api.get_id(sample), api.get_url(sample))) + + # root analysis was submitted, but not the partition analysis + elif ISubmitted.providedBy(root_an) and not ISubmitted.providedBy(part_an): + # -> remove the analysis from the partition + partition._delObject(analysis_id) + logger.info( + "Remove analysis '{}' in state '{}' from partition {}: {}" + .format(analysis_id, part_an_state, + api.get_id(partition), api.get_url(partition))) + + # partition analysis was submitted, but not the root analysis + elif ISubmitted.providedBy(part_an) and not ISubmitted.providedBy(root_an): + # -> remove the analysis from the root sample + sample._delObject(analysis_id) + logger.info( + "Remove analysis '{}' in state '{}' from sample {}: {}" + .format(analysis_id, root_an_state, + api.get_id(sample), api.get_url(sample))) + + # inconsistent state + else: + logger.warning( + "Can not handle analysis '{}' located in '{}' and '{}'" + .format(analysis_id, repr(sample), repr(partition))) + + logger.info("Removing cascaded analyses from Root Samples... 
[DONE]") def reindex_client_fields(portal): From 155bdb139d773125946a289ef4365d25cd77d950 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jordi=20Puiggen=C3=A9?= Date: Thu, 16 Jan 2020 18:54:52 +0100 Subject: [PATCH 14/22] Remove masking in ZCatalog's monkey --- bika/lims/browser/fields/aranalysesfield.py | 2 +- bika/lims/monkey/zcatalog.py | 23 +++------------------ 2 files changed, 4 insertions(+), 21 deletions(-) diff --git a/bika/lims/browser/fields/aranalysesfield.py b/bika/lims/browser/fields/aranalysesfield.py index 8ae2481dab..6c78368855 100644 --- a/bika/lims/browser/fields/aranalysesfield.py +++ b/bika/lims/browser/fields/aranalysesfield.py @@ -82,7 +82,7 @@ def get(self, instance, **kwargs): # Do the search against the catalog query["portal_type"] = "Analysis" - query["getRequestUID"] = api.get_uid(instance) + query["getAncestorsUIDs"] = api.get_uid(instance) brains = catalog(query) if full_objects: return map(api.get_object, brains) diff --git a/bika/lims/monkey/zcatalog.py b/bika/lims/monkey/zcatalog.py index 1784ea289b..344fe5f62a 100644 --- a/bika/lims/monkey/zcatalog.py +++ b/bika/lims/monkey/zcatalog.py @@ -33,32 +33,15 @@ def searchResults(self, REQUEST=None, used=None, **kw): and self.id == CATALOG_ANALYSIS_LISTING: # Fetch all analyses that have the request UID passed in as an ancestor, - # cause we want Primary ARs to always display the analyses from their - # derived ARs (if result is not empty) - + # cause we want for Samples to always return the contained analyses plus + # those contained in partitions request = REQUEST.copy() orig_uid = request.get('getRequestUID') - # If a list of request uid, retrieve them sequentially to make the - # masking process easier - if isinstance(orig_uid, list): - results = list() - for uid in orig_uid: - request['getRequestUID'] = [uid] - results += self.searchResults(REQUEST=request, used=used, **kw) - return results - # Get all analyses, those from descendant ARs included del request['getRequestUID'] 
 request['getAncestorsUIDs'] = orig_uid - results = self.searchResults(REQUEST=request, used=used, **kw) - - # Masking - primary = filter(lambda an: an.getParentUID == orig_uid, results) - derived = filter(lambda an: an.getParentUID != orig_uid, results) - derived_keys = map(lambda an: an.getKeyword, derived) - results = filter(lambda an: an.getKeyword not in derived_keys, primary) - return results + derived + return self.searchResults(REQUEST=request, used=used, **kw) # Normal search return self._catalog.searchResults(REQUEST, used, **kw) From 9c4e1b80454602a5702f05549c7ce0b7bbf4df1e Mon Sep 17 00:00:00 2001 From: Ramon Bartl Date: Thu, 16 Jan 2020 20:01:12 +0100 Subject: [PATCH 15/22] Skip rejected or retracted analyses from partitions --- bika/lims/upgrade/v01_03_003.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/bika/lims/upgrade/v01_03_003.py b/bika/lims/upgrade/v01_03_003.py index 36127680c4..c9a41b74b9 100644 --- a/bika/lims/upgrade/v01_03_003.py +++ b/bika/lims/upgrade/v01_03_003.py @@ -341,6 +341,12 @@ def remove_cascaded_analyses_of_root_samples(portal): .format(analysis_id, root_an_state, api.get_id(sample), api.get_url(sample))) + # partition analysis is in invalid state + elif part_an_state in ["rejected", "retracted"]: + # -> probably the retest was automatically created in the + # parent instead of the partition + pass + # root analysis was submitted, but not the partition analysis elif ISubmitted.providedBy(root_an) and not ISubmitted.providedBy(part_an): # -> remove the analysis from the partition From 5fcd853b45c4a8a9e0b11c9342048175043040db Mon Sep 17 00:00:00 2001 From: Ramon Bartl Date: Thu, 16 Jan 2020 20:11:14 +0100 Subject: [PATCH 16/22] Also ignore root analyses that are in invalid state --- bika/lims/upgrade/v01_03_003.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/bika/lims/upgrade/v01_03_003.py b/bika/lims/upgrade/v01_03_003.py index c9a41b74b9..921f5ef131 100644 --- a/bika/lims/upgrade/v01_03_003.py +++ 
b/bika/lims/upgrade/v01_03_003.py @@ -341,6 +341,12 @@ def remove_cascaded_analyses_of_root_samples(portal): .format(analysis_id, root_an_state, api.get_id(sample), api.get_url(sample))) + # root analysis is in invalid state + elif root_an_state in ["rejected", "retracted"]: + # -> probably the retest was automatically created in the + # parent instead of the partition + pass + # partition analysis is in invalid state elif part_an_state in ["rejected", "retracted"]: # -> probably the retest was automatically created in the From 0ac1a6d00a420bccfd8f77b71a88b78f647f1e78 Mon Sep 17 00:00:00 2001 From: Ramon Bartl Date: Thu, 16 Jan 2020 20:42:39 +0100 Subject: [PATCH 17/22] Improved migration step --- bika/lims/upgrade/v01_03_003.py | 39 +++++++++++++++++++++++++++++++-- 1 file changed, 37 insertions(+), 2 deletions(-) diff --git a/bika/lims/upgrade/v01_03_003.py b/bika/lims/upgrade/v01_03_003.py index 921f5ef131..03bd53b29b 100644 --- a/bika/lims/upgrade/v01_03_003.py +++ b/bika/lims/upgrade/v01_03_003.py @@ -18,11 +18,15 @@ # Copyright 2018-2019 by it's authors. # Some rights reserved, see README and LICENSE. 
+from collections import defaultdict +from operator import itemgetter + from bika.lims import api from bika.lims import logger from bika.lims.catalog.bikasetup_catalog import SETUP_CATALOG from bika.lims.config import PROJECTNAME as product from bika.lims.interfaces import ISubmitted +from bika.lims.interfaces import IVerified from bika.lims.setuphandlers import setup_form_controller_actions from bika.lims.upgrade import upgradestep from bika.lims.upgrade.utils import UpgradeUtils @@ -313,6 +317,9 @@ def remove_cascaded_analyses_of_root_samples(portal): if analysis_mapping: to_clean.append((root_sample, analysis_mapping)) + # count the cases for each condition + case_counter = defaultdict(int) + # cleanup cascaded analyses # mapping maps the analysis id -> partition for sample, mapping in to_clean: @@ -332,6 +339,8 @@ def remove_cascaded_analyses_of_root_samples(portal): # WF state from the partition sample analysis part_an_state = api.get_workflow_status_of(part_an) + case_counter["{}_{}".format(root_an_state, part_an_state)] += 1 + # both analyses have the same WF state if root_an_state == part_an_state: # -> remove the analysis from the root sample @@ -341,6 +350,27 @@ def remove_cascaded_analyses_of_root_samples(portal): .format(analysis_id, root_an_state, api.get_id(sample), api.get_url(sample))) + # both are in verified/published state + elif IVerified.providedBy(root_an) and IVerified.providedBy(part_an): + root_an_result = root_an.getResult() + part_an_result = part_an.getResult() + if root_an_result == part_an_result: + # remove the root analysis + sample._delObject(analysis_id) + logger.info( + "Remove analysis '{}' in state '{}' from sample {}: {}" + .format(analysis_id, root_an_state, + api.get_id(sample), api.get_url(sample))) + else: + # -> unsolvable edge case + # display an error message + logger.error( + "Analysis '{}' of root sample in state '{}' " + "and Analysis of partition in state {}. 
" + "Please fix manually: {}" + .format(analysis_id, root_an_state, part_an_state, + api.get_url(sample))) + # root analysis is in invalid state elif root_an_state in ["rejected", "retracted"]: # -> probably the retest was automatically created in the @@ -374,11 +404,16 @@ def remove_cascaded_analyses_of_root_samples(portal): # inconsistent state else: logger.warning( - "Can not handle analysis '{}' located in '{}' and '{}'" - .format(analysis_id, repr(sample), repr(partition))) + "Can not handle analysis '{}' located in '{}' (state {}) and '{}' (state {})" + .format(analysis_id, + repr(sample), root_an_state, + repr(partition), part_an_state)) logger.info("Removing cascaded analyses from Root Samples... [DONE]") + logger.info("State Combinations (root_an_state, part_an_state): {}" + .format(sorted(case_counter.items(), key=itemgetter(1), reverse=True))) + def reindex_client_fields(portal): logger.info("Reindexing client fields ...") From 0f1c04971dc4a086f14efe8be58dfad264520d79 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jordi=20Puiggen=C3=A9?= Date: Thu, 16 Jan 2020 22:12:32 +0100 Subject: [PATCH 18/22] Add a link to partition in analysis listing if necessary --- bika/lims/browser/analyses/view.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/bika/lims/browser/analyses/view.py b/bika/lims/browser/analyses/view.py index f2ca83cb1d..5a81de8420 100644 --- a/bika/lims/browser/analyses/view.py +++ b/bika/lims/browser/analyses/view.py @@ -563,6 +563,8 @@ def folderitem(self, obj, item, index): self._folder_item_detection_limits(obj, item) # Fill Specifications self._folder_item_specifications(obj, item) + # Fill Partition + self._folder_item_partition(obj, item) # Fill Due Date and icon if late/overdue self._folder_item_duedate(obj, item) # Fill verification criteria @@ -1172,6 +1174,20 @@ def _folder_item_accredited_icon(self, analysis_brain, item): img = get_image("accredited.png", title=t(_("Accredited"))) self._append_html_element(item, 
"Service", img) + def _folder_item_partition(self, analysis_brain, item): + """Adds an anchor to the partition if the current analysis is from a + partition that does not match with the current context + """ + if not IAnalysisRequest.providedBy(self.context): + return + + sample_id = analysis_brain.getRequestID + if sample_id != api.get_id(self.context): + part_url = analysis_brain.getRequestURL + url = get_link(part_url, value=sample_id, **{"class": "small"}) + title = item["replace"].get("Service") or item["Service"] + item["replace"]["Service"] = "{}
{}".format(title, url) + def _folder_item_report_visibility(self, analysis_brain, item): """Set if the hidden field can be edited (enabled/disabled) From dde36ca800acdbb04ac315256f2026c72df641e0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jordi=20Puiggen=C3=A9?= Date: Thu, 16 Jan 2020 22:21:11 +0100 Subject: [PATCH 19/22] Handle None items gracefully --- bika/lims/browser/fields/aranalysesfield.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/bika/lims/browser/fields/aranalysesfield.py b/bika/lims/browser/fields/aranalysesfield.py index 6c78368855..f3fbfd6082 100644 --- a/bika/lims/browser/fields/aranalysesfield.py +++ b/bika/lims/browser/fields/aranalysesfield.py @@ -104,6 +104,9 @@ def set(self, instance, items, prices=None, specs=None, hidden=None, **kw): :type hidden: list :returns: list of new assigned Analyses """ + if items is None: + items = [] + # Bail out if the items is not a list type if not isinstance(items, (list, tuple)): raise TypeError( From 9d2b839bb43b0748d582f6da43b3172d0a98ca34 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jordi=20Puiggen=C3=A9?= Date: Thu, 16 Jan 2020 22:50:13 +0100 Subject: [PATCH 20/22] Changelog --- CHANGES.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGES.rst b/CHANGES.rst index 1b2a4467c6..57c7e94ca0 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -7,6 +7,7 @@ Changelog **Added** +- #1505 Display partition link in analyses listing - #1491 Enable Audit-logging for Dexterity Contents - #1489 Support Multiple Catalogs for Dexterity Contents - #1481 Filter Templates field when Sample Type is selected in Sample Add form From f1d2869bc1b91545a3cd91f0a0ac909b521ce3bb Mon Sep 17 00:00:00 2001 From: Ramon Bartl Date: Fri, 17 Jan 2020 17:14:14 +0100 Subject: [PATCH 21/22] Fetch transitions in manage analyses view --- bika/lims/browser/analysisrequest/manage_analyses.py | 1 - 1 file changed, 1 deletion(-) diff --git a/bika/lims/browser/analysisrequest/manage_analyses.py b/bika/lims/browser/analysisrequest/manage_analyses.py 
index afa74147ac..1db380950c 100644 --- a/bika/lims/browser/analysisrequest/manage_analyses.py +++ b/bika/lims/browser/analysisrequest/manage_analyses.py @@ -60,7 +60,6 @@ def __init__(self, context, request): self.show_select_all_checkbox = False self.pagesize = 999999 self.show_search = True - self.fetch_transitions_on_select = False self.categories = [] self.selected = [] From 2478cdf17af9d81c600bcccc42aa0af4cb86f200 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jordi=20Puiggen=C3=A9?= Date: Fri, 17 Jan 2020 22:35:47 +0100 Subject: [PATCH 22/22] Changelog --- CHANGES.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGES.rst b/CHANGES.rst index 57c7e94ca0..9ae1bc4d6e 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -31,6 +31,7 @@ Changelog **Fixed** +- #1505 Manage Analyses Form re-applies partitioned Analyses back to the Root - #1503 Avoid duplicate CSS IDs in multi-column Add form - #1501 Fix Attribute Error in Reference Sample Popup - #1493 jsonapi.read omits `include_methods` when a single parameter is used