diff --git a/CHANGES.rst b/CHANGES.rst
index fbabb9fa2e..772fbaa8ff 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -43,6 +43,8 @@ Changelog
 
 **Removed**
 
+- #1530 Removed ARImport
+- #1530 Removed stale type registrations
 - #1541 Remove add/edit options of ReferenceWidget
 - #1535 Remove `zcatalog` monkey (and `getRequestUID` index)
 - #1518 Removed stale indexes from `analysis_catalog`
diff --git a/bika/lims/__init__.py b/bika/lims/__init__.py
index 1801d75700..14815547ae 100644
--- a/bika/lims/__init__.py
+++ b/bika/lims/__init__.py
@@ -73,7 +73,6 @@ def initialize(context):
     from content.analysisrequestsfolder import AnalysisRequestsFolder  # noqa
     from content.analysisservice import AnalysisService  # noqa
     from content.analysisspec import AnalysisSpec  # noqa
-    from content.arimport import ARImport  # noqa
     from content.arreport import ARReport  # noqa
     from content.artemplate import ARTemplate  # noqa
     from content.attachment import Attachment  # noqa
@@ -82,7 +81,6 @@ def initialize(context):
     from content.batch import Batch  # noqa
     from content.batchfolder import BatchFolder  # noqa
     from content.batchlabel import BatchLabel  # noqa
-    from content.bikacache import BikaCache  # noqa
     from content.bikaschema import BikaSchema  # noqa
     from content.bikasetup import BikaSetup  # noqa
     from content.calculation import Calculation  # noqa
@@ -103,8 +101,6 @@ def initialize(context):
     from content.instrumenttype import InstrumentType  # noqa
     from content.instrumentvalidation import InstrumentValidation  # noqa
     from content.invoice import Invoice  # noqa
-    from content.invoicebatch import InvoiceBatch  # noqa
-    from content.invoicefolder import InvoiceFolder  # noqa
     from content.labcontact import LabContact  # noqa
     from content.laboratory import Laboratory  # noqa
     from content.labproduct import LabProduct  # noqa
@@ -125,12 +121,9 @@ def initialize(context):
     from content.rejectanalysis import RejectAnalysis  # noqa
     from content.report import Report  # noqa
     from content.reportfolder import ReportFolder  # noqa
-    from content.sample import Sample  # noqa
     from content.samplecondition import SampleCondition  # noqa
     from content.samplematrix import SampleMatrix  # noqa
-    from content.samplepartition import SamplePartition  # noqa
     from content.samplepoint import SamplePoint  # noqa
-    from content.samplesfolder import SamplesFolder  # noqa
     from content.sampletype import SampleType  # noqa
     from content.samplingdeviation import SamplingDeviation  # noqa
     from content.srtemplate import SRTemplate  # noqa
diff --git a/bika/lims/browser/arimports.py b/bika/lims/browser/arimports.py
deleted file mode 100644
index e576b1db43..0000000000
--- a/bika/lims/browser/arimports.py
+++ /dev/null
@@ -1,221 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of SENAITE.CORE.
-#
-# SENAITE.CORE is free software: you can redistribute it and/or modify it under
-# the terms of the GNU General Public License as published by the Free Software
-# Foundation, version 2.
-#
-# This program is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
-# details.
-#
-# You should have received a copy of the GNU General Public License along with
-# this program; if not, write to the Free Software Foundation, Inc., 51
-# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-# Copyright 2018-2020 by it's authors.
-# Some rights reserved, see README and LICENSE.
- -import csv -from DateTime.DateTime import DateTime -from bika.lims import bikaMessageFactory as _ -from bika.lims.browser import BrowserView, ulocalized_time -from bika.lims.browser.bika_listing import BikaListingView -from bika.lims.interfaces import IClient -from bika.lims.utils import tmpID -from bika.lims.workflow import getTransitionDate -from plone.app.contentlisting.interfaces import IContentListing -from plone.app.layout.globals.interfaces import IViewView -from plone.protect import CheckAuthenticator -from Products.Archetypes.utils import addStatusMessage -from Products.CMFCore.utils import getToolByName -from Products.CMFCore.WorkflowCore import WorkflowException -from Products.CMFPlone.utils import _createObjectByType -from Products.Five.browser.pagetemplatefile import ViewPageTemplateFile -from zope.interface import alsoProvides -from zope.interface import implements - -import os - - -class ARImportsView(BikaListingView): - implements(IViewView) - - def __init__(self, context, request): - super(ARImportsView, self).__init__(context, request) - request.set('disable_plone.rightcolumn', 1) - alsoProvides(request, IContentListing) - - self.catalog = "portal_catalog" - self.contentFilter = { - 'portal_type': 'ARImport', - 'is_active': True, - 'sort_on': 'sortable_title', - } - self.context_actions = {} - if IClient.providedBy(self.context): - self.context_actions = { - _('AR Import'): { - 'url': 'arimport_add', - 'icon': '++resource++bika.lims.images/add.png'}} - - self.show_select_row = False - self.show_select_column = False - self.pagesize = 50 - self.form_id = "arimports" - - self.icon = \ - self.portal_url + "/++resource++bika.lims.images/arimport_big.png" - self.title = self.context.translate(_("Sample Imports")) - self.description = "" - - self.columns = { - 'Title': {'title': _('Title')}, - 'Client': {'title': _('Client')}, - 'Filename': {'title': _('Filename')}, - 'Creator': {'title': _('Date Created')}, - 'DateCreated': {'title': _('Date Created')}, - 'DateValidated': {'title': _('Date Validated')}, - 'DateImported': {'title': _('Date Imported')}, - 'state_title': {'title': _('State')}, - } - self.review_states = [ - {'id': 'default', - 'title': _('Pending'), - 'contentFilter': {'review_state': ['invalid', 'valid']}, - 'columns': ['Title', - 'Creator', - 'Filename', - 'Client', - 'DateCreated', - 'DateValidated', - 'DateImported', - 'state_title']}, - {'id': 'imported', - 'title': _('Imported'), - 'contentFilter': {'review_state': 'imported'}, - 'columns': ['Title', - 'Creator', - 'Filename', - 'Client', - 'DateCreated', - 'DateValidated', - 'DateImported', - 'state_title']}, - ] - - def folderitems(self, **kwargs): - items = super(ARImportsView, self).folderitems() - for x in range(len(items)): - if 'obj' not in items[x]: - continue - obj = items[x]['obj'] - items[x]['Title'] = obj.title_or_id() - if items[x]['review_state'] == 'invalid': - items[x]['replace']['Title'] = "%s" % ( - obj.absolute_url(), items[x]['Title']) - else: - items[x]['replace']['Title'] = "%s" % ( - obj.absolute_url(), items[x]['Title']) - items[x]['Creator'] = obj.Creator() - items[x]['Filename'] = obj.getFilename() - parent = obj.aq_parent - items[x]['Client'] = parent if IClient.providedBy(parent) else '' - items[x]['replace']['Client'] = "%s" % ( - parent.absolute_url(), parent.Title()) - items[x]['DateCreated'] = ulocalized_time( - obj.created(), long_format=True, time_only=False, context=obj) - date = getTransitionDate(obj, 'validate') - items[x]['DateValidated'] = date if date else '' - 
date = getTransitionDate(obj, 'import') - items[x]['DateImported'] = date if date else '' - - return items - - -class ClientARImportsView(ARImportsView): - def __init__(self, context, request): - super(ClientARImportsView, self).__init__(context, request) - self.contentFilter['path'] = { - 'query': '/'.join(context.getPhysicalPath()) - } - - self.review_states = [ - {'id': 'default', - 'title': _('Pending'), - 'contentFilter': {'review_state': ['invalid', 'valid']}, - 'columns': ['Title', - 'Creator', - 'Filename', - 'DateCreated', - 'DateValidated', - 'DateImported', - 'state_title']}, - {'id': 'imported', - 'title': _('Imported'), - 'contentFilter': {'review_state': 'imported'}, - 'columns': ['Title', - 'Creator', - 'Filename', - 'DateCreated', - 'DateValidated', - 'DateImported', - 'state_title']}, - ] - - -class ClientARImportAddView(BrowserView): - implements(IViewView) - template = ViewPageTemplateFile('templates/arimport_add.pt') - - def __init__(self, context, request): - super(ClientARImportAddView, self).__init__(context, request) - alsoProvides(request, IContentListing) - - def __call__(self): - request = self.request - form = request.form - CheckAuthenticator(form) - if form.get('submitted'): - # Validate form submission - csvfile = form.get('csvfile') - data = csvfile.read() - lines = data.splitlines() - filename = csvfile.filename - if not csvfile: - addStatusMessage(request, _("No file selected")) - return self.template() - if len(lines) < 3: - addStatusMessage(request, _("Too few lines in CSV file")) - return self.template() - # Create the arimport object - arimport = _createObjectByType("ARImport", self.context, tmpID()) - arimport.processForm() - arimport.setTitle(self.mkTitle(filename)) - arimport.schema['OriginalFile'].set(arimport, data) - # Save all fields from the file into the arimport schema - arimport.save_header_data() - arimport.save_sample_data() - # immediate batch creation if required - arimport.create_or_reference_batch() - # Attempt first validation - try: - workflow = getToolByName(self.context, 'portal_workflow') - workflow.doActionFor(arimport, 'validate') - except WorkflowException: - self.request.response.redirect(arimport.absolute_url() + - "/edit") - else: - return self.template() - - def mkTitle(self, filename): - pc = getToolByName(self.context, 'portal_catalog') - nr = 1 - while True: - newname = '%s-%s' % (os.path.splitext(filename)[0], nr) - existing = pc(portal_type='ARImport', title=newname) - if not existing: - return newname - nr += 1 diff --git a/bika/lims/browser/arimports.zcml b/bika/lims/browser/arimports.zcml deleted file mode 100644 index eb4cf50f12..0000000000 --- a/bika/lims/browser/arimports.zcml +++ /dev/null @@ -1,31 +0,0 @@ - - - - - - - - - diff --git a/bika/lims/browser/configure.zcml b/bika/lims/browser/configure.zcml index 44330ff0f5..26a222aac9 100644 --- a/bika/lims/browser/configure.zcml +++ b/bika/lims/browser/configure.zcml @@ -11,7 +11,6 @@ - diff --git a/bika/lims/browser/templates/arimport_add.pt b/bika/lims/browser/templates/arimport_add.pt deleted file mode 100644 index b71d855cc2..0000000000 --- a/bika/lims/browser/templates/arimport_add.pt +++ /dev/null @@ -1,38 +0,0 @@ - - -< i18n:translate="" - tal:content="Add SampleImport"/> - - - - - -
-    [markup of the deleted arimport_add.pt template was lost in extraction; it rendered the "Import Sample Data" CSV upload form used by ClientARImportAddView]
- - - diff --git a/bika/lims/config.py b/bika/lims/config.py index 770da18755..b520cf53f6 100644 --- a/bika/lims/config.py +++ b/bika/lims/config.py @@ -88,11 +88,6 @@ ('r', _('Render in Report')), ('i', _('Ignore in Report')), )) -ARIMPORT_OPTIONS = DisplayList(( - ('c', _('Classic')), - ('p', _('Profiles')), - # ('s', _('Special')), -)) GENDERS = DisplayList(( ('male', _('Male')), ('female', _('Female')), diff --git a/bika/lims/content/arimport.py b/bika/lims/content/arimport.py deleted file mode 100644 index fc2f245156..0000000000 --- a/bika/lims/content/arimport.py +++ /dev/null @@ -1,964 +0,0 @@ -# -*- coding: utf-8 -*- -# -# This file is part of SENAITE.CORE. -# -# SENAITE.CORE is free software: you can redistribute it and/or modify it under -# the terms of the GNU General Public License as published by the Free Software -# Foundation, version 2. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more -# details. -# -# You should have received a copy of the GNU General Public License along with -# this program; if not, write to the Free Software Foundation, Inc., 51 -# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. -# -# Copyright 2018-2020 by it's authors. -# Some rights reserved, see README and LICENSE. - -from AccessControl import ClassSecurityInfo -import csv -from copy import deepcopy -from DateTime.DateTime import DateTime -from Products.Archetypes.event import ObjectInitializedEvent -from Products.CMFCore.WorkflowCore import WorkflowException -from bika.lims import bikaMessageFactory as _ -from bika.lims.browser import ulocalized_time -from bika.lims.config import PROJECTNAME -from bika.lims.content.bikaschema import BikaSchema -from bika.lims.content.analysisrequest import schema as ar_schema -from bika.lims.content.sample import schema as sample_schema -from bika.lims.idserver import renameAfterCreation -from bika.lims.interfaces import IARImport, IClient -from bika.lims.utils import tmpID -from bika.lims.utils.analysisrequest import create_analysisrequest -from bika.lims.vocabularies import CatalogVocabulary -from bika.lims.workflow import doActionFor -from collective.progressbar.events import InitialiseProgressBar -from collective.progressbar.events import ProgressBar -from collective.progressbar.events import ProgressState -from collective.progressbar.events import UpdateProgressEvent -from Products.Archetypes import atapi -from Products.Archetypes.public import * -from plone.app.blob.field import FileField as BlobFileField -from Products.Archetypes.references import HoldingReference -from Products.Archetypes.utils import addStatusMessage -from Products.CMFCore.utils import getToolByName -from Products.CMFPlone.utils import _createObjectByType -from Products.DataGridField import CheckboxColumn -from Products.DataGridField import Column -from Products.DataGridField import DataGridField -from Products.DataGridField import DataGridWidget -from Products.DataGridField import DateColumn -from Products.DataGridField import LinesColumn -from Products.DataGridField import SelectColumn -from zope import event -from zope.event import notify -from zope.i18nmessageid import MessageFactory -from zope.interface import implements - -from bika.lims.browser.widgets import ReferenceWidget as bReferenceWidget - -import sys -import transaction - -_p = MessageFactory(u"plone") - -OriginalFile = 
BlobFileField( - 'OriginalFile', - widget=ComputedWidget( - visible=False - ), -) - -Filename = StringField( - 'Filename', - widget=StringWidget( - label=_('Original Filename'), - visible=True - ), -) - -NrSamples = StringField( - 'NrSamples', - widget=StringWidget( - label=_('Number of samples'), - visible=True - ), -) - -ClientName = StringField( - 'ClientName', - searchable=True, - widget=StringWidget( - label=_("Client Name"), - ), -) - -ClientID = StringField( - 'ClientID', - searchable=True, - widget=StringWidget( - label=_('Client ID'), - ), -) - -ClientOrderNumber = StringField( - 'ClientOrderNumber', - searchable=True, - widget=StringWidget( - label=_('Client Order Number'), - ), -) - -ClientReference = StringField( - 'ClientReference', - searchable=True, - widget=StringWidget( - label=_('Client Reference'), - ), -) - -Contact = ReferenceField( - 'Contact', - allowed_types=('Contact',), - relationship='ARImportContact', - default_method='getContactUIDForUser', - referenceClass=HoldingReference, - vocabulary_display_path_bound=sys.maxint, - widget=ReferenceWidget( - label=_('Primary Contact'), - size=20, - visible=True, - base_query={'is_active': True}, - showOn=True, - popup_width='300px', - colModel=[{'columnName': 'UID', 'hidden': True}, - {'columnName': 'Fullname', 'width': '100', - 'label': _('Name')}], - ), -) - -Batch = ReferenceField( - 'Batch', - allowed_types=('Batch',), - relationship='ARImportBatch', - widget=bReferenceWidget( - label=_('Batch'), - visible=True, - catalog_name='bika_catalog', - base_query={'review_state': 'open'}, - showOn=True, - ), -) - -CCContacts = DataGridField( - 'CCContacts', - allow_insert=False, - allow_delete=False, - allow_reorder=False, - allow_empty_rows=False, - columns=('CCNamesReport', - 'CCEmailsReport', - 'CCNamesInvoice', - 'CCEmailsInvoice'), - default=[{'CCNamesReport': [], - 'CCEmailsReport': [], - 'CCNamesInvoice': [], - 'CCEmailsInvoice': [] - }], - widget=DataGridWidget( - columns={ - 'CCNamesReport': LinesColumn('Report CC Contacts'), - 'CCEmailsReport': LinesColumn('Report CC Emails'), - 'CCNamesInvoice': LinesColumn('Invoice CC Contacts'), - 'CCEmailsInvoice': LinesColumn('Invoice CC Emails') - } - ) -) - -SampleData = DataGridField( - 'SampleData', - allow_insert=True, - allow_delete=True, - allow_reorder=False, - allow_empty_rows=False, - allow_oddeven=True, - columns=('ClientSampleID', - 'SamplingDate', - 'DateSampled', - 'SamplePoint', - 'SampleMatrix', - 'SampleType', # not a schema field! - 'ContainerType', # not a schema field! - 'Analyses', # not a schema field! - 'Profiles' # not a schema field! 
- ), - widget=DataGridWidget( - label=_('Samples'), - columns={ - 'ClientSampleID': Column('Sample ID'), - 'SamplingDate': DateColumn('Sampling Date'), - 'DateSampled': DateColumn('Date Sampled'), - 'SamplePoint': SelectColumn( - 'Sample Point', vocabulary='Vocabulary_SamplePoint'), - 'SampleMatrix': SelectColumn( - 'Sample Matrix', vocabulary='Vocabulary_SampleMatrix'), - 'SampleType': SelectColumn( - 'Sample Type', vocabulary='Vocabulary_SampleType'), - 'ContainerType': SelectColumn( - 'Container', vocabulary='Vocabulary_ContainerType'), - 'Analyses': LinesColumn('Analyses'), - 'Profiles': LinesColumn('Profiles'), - } - ) -) - -Errors = LinesField( - 'Errors', - widget=LinesWidget( - label=_('Errors'), - rows=10, - ) -) - -schema = BikaSchema.copy() + Schema(( - OriginalFile, - Filename, - NrSamples, - ClientName, - ClientID, - ClientOrderNumber, - ClientReference, - Contact, - CCContacts, - Batch, - SampleData, - Errors, -)) - -schema['title'].validators = () -# Update the validation layer after change the validator in runtime -schema['title']._validationLayer() - - -class ARImport(BaseFolder): - security = ClassSecurityInfo() - schema = schema - displayContentsTab = False - implements(IARImport) - - _at_rename_after_creation = True - - def _renameAfterCreation(self, check_auto_id=False): - renameAfterCreation(self) - - def guard_validate_transition(self): - """We may only attempt validation if file data has been uploaded. - """ - data = self.getOriginalFile() - if data and len(data): - return True - - # TODO Workflow - ARImport - Remove - def workflow_before_validate(self): - """This function transposes values from the provided file into the - ARImport object's fields, and checks for invalid values. - - If errors are found: - - Validation transition is aborted. - - Errors are stored on object and displayed to user. - - """ - # Re-set the errors on this ARImport each time validation is attempted. - # When errors are detected they are immediately appended to this field. - self.setErrors([]) - - self.validate_headers() - self.validate_samples() - - if self.getErrors(): - addStatusMessage(self.REQUEST, _p('Validation errors.'), 'error') - transaction.commit() - self.REQUEST.response.write( - '' % ( - self.absolute_url())) - self.REQUEST.response.write( - '' % ( - self.absolute_url())) - - def at_post_edit_script(self): - workflow = getToolByName(self, 'portal_workflow') - trans_ids = [t['id'] for t in workflow.getTransitionsFor(self)] - if 'validate' in trans_ids: - workflow.doActionFor(self, 'validate') - - def workflow_script_import(self): - """Create objects from valid ARImport - """ - bsc = getToolByName(self, 'bika_setup_catalog') - client = self.aq_parent - - title = _('Submitting Sample Import') - description = _('Creating and initialising objects') - bar = ProgressBar(self, self.REQUEST, title, description) - notify(InitialiseProgressBar(bar)) - - profiles = [x.getObject() for x in bsc(portal_type='AnalysisProfile')] - - gridrows = self.schema['SampleData'].get(self) - row_cnt = 0 - for therow in gridrows: - row = deepcopy(therow) - row_cnt += 1 - - # Profiles are titles, profile keys, or UIDS: convert them to UIDs. 
- newprofiles = [] - for title in row['Profiles']: - objects = [x for x in profiles - if title in (x.getProfileKey(), x.UID(), x.Title())] - for obj in objects: - newprofiles.append(obj.UID()) - row['Profiles'] = newprofiles - - # Same for analyses - newanalyses = set(self.get_row_services(row) + - self.get_row_profile_services(row)) - # get batch - batch = self.schema['Batch'].get(self) - if batch: - row['Batch'] = batch.UID() - # Add AR fields from schema into this row's data - row['ClientReference'] = self.getClientReference() - row['ClientOrderNumber'] = self.getClientOrderNumber() - contact_uid =\ - self.getContact().UID() if self.getContact() else None - row['Contact'] = contact_uid - # Creating analysis request from gathered data - ar = create_analysisrequest( - client, - self.REQUEST, - row, - analyses=list(newanalyses),) - - # progress marker update - progress_index = float(row_cnt) / len(gridrows) * 100 - progress = ProgressState(self.REQUEST, progress_index) - notify(UpdateProgressEvent(progress)) - - # document has been written to, and redirect() fails here - self.REQUEST.response.write( - '' % ( - self.absolute_url())) - - def get_header_values(self): - """Scrape the "Header" values from the original input file - """ - lines = self.getOriginalFile().data.splitlines() - reader = csv.reader(lines) - header_fields = header_data = [] - for row in reader: - if not any(row): - continue - if row[0].strip().lower() == 'header': - header_fields = [x.strip() for x in row][1:] - continue - if row[0].strip().lower() == 'header data': - header_data = [x.strip() for x in row][1:] - break - if not (header_data or header_fields): - return None - if not (header_data and header_fields): - self.error("File is missing header row or header data") - return None - # inject us out of here - values = dict(zip(header_fields, header_data)) - # blank cell from sheet will probably make it in here: - if '' in values: - del (values['']) - return values - - def save_header_data(self): - """Save values from the file's header row into their schema fields. 
- """ - client = self.aq_parent - - headers = self.get_header_values() - if not headers: - return False - - # Plain header fields that can be set into plain schema fields: - for h, f in [ - ('File name', 'Filename'), - ('No of Samples', 'NrSamples'), - ('Client name', 'ClientName'), - ('Client ID', 'ClientID'), - ('Client Order Number', 'ClientOrderNumber'), - ('Client Reference', 'ClientReference') - ]: - v = headers.get(h, None) - if v: - field = self.schema[f] - field.set(self, v) - del (headers[h]) - - # Primary Contact - v = headers.get('Contact', None) - contacts = [x for x in client.objectValues('Contact')] - contact = [c for c in contacts if c.Title() == v] - if contact: - self.schema['Contact'].set(self, contact) - else: - self.error("Specified contact '%s' does not exist; using '%s'"% - (v, contacts[0].Title())) - self.schema['Contact'].set(self, contacts[0]) - del (headers['Contact']) - - # CCContacts - field_value = { - 'CCNamesReport': '', - 'CCEmailsReport': '', - 'CCNamesInvoice': '', - 'CCEmailsInvoice': '' - } - for h, f in [ - # csv header name DataGrid Column ID - ('CC Names - Report', 'CCNamesReport'), - ('CC Emails - Report', 'CCEmailsReport'), - ('CC Names - Invoice', 'CCNamesInvoice'), - ('CC Emails - Invoice', 'CCEmailsInvoice'), - ]: - if h in headers: - values = [x.strip() for x in headers.get(h, '').split(",")] - field_value[f] = values if values else '' - del (headers[h]) - self.schema['CCContacts'].set(self, [field_value]) - - if headers: - unexpected = ','.join(headers.keys()) - self.error("Unexpected header fields: %s" % unexpected) - - def get_sample_values(self): - """Read the rows specifying Samples and return a dictionary with - related data. - - keys are: - headers - row with "Samples" in column 0. These headers are - used as dictionary keys in the rows below. - prices - Row with "Analysis Price" in column 0. - total_analyses - Row with "Total analyses" in colmn 0 - price_totals - Row with "Total price excl Tax" in column 0 - samples - All other sample rows. - - """ - res = {'samples': []} - lines = self.getOriginalFile().data.splitlines() - reader = csv.reader(lines) - next_rows_are_sample_rows = False - for row in reader: - if not any(row): - continue - if next_rows_are_sample_rows: - vals = [x.strip() for x in row] - if not any(vals): - continue - res['samples'].append(zip(res['headers'], vals)) - elif row[0].strip().lower() == 'samples': - res['headers'] = [x.strip() for x in row] - elif row[0].strip().lower() == 'analysis price': - res['prices'] = \ - zip(res['headers'], [x.strip() for x in row]) - elif row[0].strip().lower() == 'total analyses': - res['total_analyses'] = \ - zip(res['headers'], [x.strip() for x in row]) - elif row[0].strip().lower() == 'total price excl tax': - res['price_totals'] = \ - zip(res['headers'], [x.strip() for x in row]) - next_rows_are_sample_rows = True - return res - - def save_sample_data(self): - """Save values from the file's header row into the DataGrid columns - after doing some very basic validation - """ - bsc = getToolByName(self, 'bika_setup_catalog') - keywords = self.bika_setup_catalog.uniqueValuesFor('getKeyword') - profiles = [] - for p in bsc(portal_type='AnalysisProfile'): - p = p.getObject() - profiles.append(p.Title()) - profiles.append(p.getProfileKey()) - - sample_data = self.get_sample_values() - if not sample_data: - return False - - # columns that we expect, but do not find, are listed here. - # we report on them only once, after looping through sample rows. 
- missing = set() - - # This contains all sample header rows that were not handled - # by this code - unexpected = set() - - # Save other errors here instead of sticking them directly into - # the field, so that they show up after MISSING and before EXPECTED - errors = [] - - # This will be the new sample-data field value, when we are done. - grid_rows = [] - - row_nr = 0 - for row in sample_data['samples']: - row = dict(row) - row_nr += 1 - - # sid is just for referring the user back to row X in their - # in put spreadsheet - gridrow = {'sid': row['Samples']} - del (row['Samples']) - - # We'll use this later to verify the number against selections - if 'Total number of Analyses or Profiles' in row: - nr_an = row['Total number of Analyses or Profiles'] - del (row['Total number of Analyses or Profiles']) - else: - nr_an = 0 - try: - nr_an = int(nr_an) - except ValueError: - nr_an = 0 - - # TODO this is ignored and is probably meant to serve some purpose. - del (row['Price excl Tax']) - - # ContainerType - not part of sample or AR schema - if 'ContainerType' in row: - title = row['ContainerType'] - if title: - obj = self.lookup(('ContainerType',), - Title=row['ContainerType']) - if obj: - gridrow['ContainerType'] = obj[0].UID - del (row['ContainerType']) - - if 'SampleMatrix' in row: - # SampleMatrix - not part of sample or AR schema - title = row['SampleMatrix'] - if title: - obj = self.lookup(('SampleMatrix',), - Title=row['SampleMatrix']) - if obj: - gridrow['SampleMatrix'] = obj[0].UID - del (row['SampleMatrix']) - - # match against sample schema - for k, v in row.items(): - if k in ['Analyses', 'Profiles']: - continue - if k in sample_schema: - del (row[k]) - if v: - try: - value = self.munge_field_value( - sample_schema, row_nr, k, v) - gridrow[k] = value - except ValueError as e: - errors.append(e.message) - - # match against ar schema - for k, v in row.items(): - if k in ['Analyses', 'Profiles']: - continue - if k in ar_schema: - del (row[k]) - if v: - try: - value = self.munge_field_value( - ar_schema, row_nr, k, v) - gridrow[k] = value - except ValueError as e: - errors.append(e.message) - - # Count and remove Keywords and Profiles from the list - gridrow['Analyses'] = [] - for k, v in row.items(): - if k in keywords: - del (row[k]) - if str(v).strip().lower() not in ('', '0', 'false'): - gridrow['Analyses'].append(k) - gridrow['Profiles'] = [] - for k, v in row.items(): - if k in profiles: - del (row[k]) - if str(v).strip().lower() not in ('', '0', 'false'): - gridrow['Profiles'].append(k) - if len(gridrow['Analyses']) + len(gridrow['Profiles']) != nr_an: - errors.append( - "Row %s: Number of analyses does not match provided value" % - row_nr) - - grid_rows.append(gridrow) - - self.setSampleData(grid_rows) - - if missing: - self.error("SAMPLES: Missing expected fields: %s" % - ','.join(missing)) - - for thing in errors: - self.error(thing) - - if unexpected: - self.error("Unexpected header fields: %s" % - ','.join(unexpected)) - - def get_batch_header_values(self): - """Scrape the "Batch Header" values from the original input file - """ - lines = self.getOriginalFile().data.splitlines() - reader = csv.reader(lines) - batch_headers = batch_data = [] - for row in reader: - if not any(row): - continue - if row[0].strip().lower() == 'batch header': - batch_headers = [x.strip() for x in row][1:] - continue - if row[0].strip().lower() == 'batch data': - batch_data = [x.strip() for x in row][1:] - break - if not (batch_data or batch_headers): - return None - if not (batch_data and 
batch_headers): - self.error("Missing batch headers or data") - return None - # Inject us out of here - values = dict(zip(batch_headers, batch_data)) - return values - - def create_or_reference_batch(self): - """Save reference to batch, if existing batch specified - Create new batch, if possible with specified values - """ - client = self.aq_parent - batch_headers = self.get_batch_header_values() - if not batch_headers: - return False - # if the Batch's Title is specified and exists, no further - # action is required. We will just set the Batch field to - # use the existing object. - batch_title = batch_headers.get('title', False) - if batch_title: - existing_batch = [x for x in client.objectValues('Batch') - if x.title == batch_title] - if existing_batch: - self.setBatch(existing_batch[0]) - return existing_batch[0] - # If the batch title is specified but does not exist, - # we will attempt to create the bach now. - if 'title' in batch_headers: - if 'id' in batch_headers: - del (batch_headers['id']) - if '' in batch_headers: - del (batch_headers['']) - batch = _createObjectByType('Batch', client, tmpID()) - batch.processForm() - batch.edit(**batch_headers) - self.setBatch(batch) - - def munge_field_value(self, schema, row_nr, fieldname, value): - """Convert a spreadsheet value into a field value that fits in - the corresponding schema field. - - boolean: All values are true except '', 'false', or '0'. - - reference: The title of an object in field.allowed_types; - returns a UID or list of UIDs - - datetime: returns a string value from ulocalized_time - - Tho this is only used during "Saving" of csv data into schema fields, - it will flag 'validation' errors, as this is the only chance we will - get to complain about these field values. - - """ - field = schema[fieldname] - if field.type == 'boolean': - value = str(value).strip().lower() - value = '' if value in ['0', 'no', 'false', 'none'] else '1' - return value - if field.type == 'reference': - value = str(value).strip() - brains = self.lookup(field.allowed_types, Title=value) - if not brains: - brains = self.lookup(field.allowed_types, UID=value) - if not brains: - raise ValueError('Row %s: value is invalid (%s=%s)' % ( - row_nr, fieldname, value)) - if field.multiValued: - return [b.UID for b in brains] if brains else [] - else: - return brains[0].UID if brains else None - if field.type == 'datetime': - try: - value = DateTime(value) - return ulocalized_time( - value, long_format=True, time_only=False, context=self) - except: - raise ValueError('Row %s: value is invalid (%s=%s)' % ( - row_nr, fieldname, value)) - return str(value) - - def validate_headers(self): - """Validate headers fields from schema - """ - - pc = getToolByName(self, 'portal_catalog') - pu = getToolByName(self, "plone_utils") - - client = self.aq_parent - - # Verify Client Name - if self.getClientName() != client.Title(): - self.error("%s: value is invalid (%s)." % ( - 'Client name', self.getClientName())) - - # Verify Client ID - if self.getClientID() != client.getClientID(): - self.error("%s: value is invalid (%s)." % ( - 'Client ID', self.getClientID())) - - existing_arimports = pc(portal_type='ARImport', - review_state=['valid', 'imported']) - # Verify Client Order Number - for arimport in existing_arimports: - if arimport.UID == self.UID() \ - or not arimport.getClientOrderNumber(): - continue - arimport = arimport.getObject() - - if arimport.getClientOrderNumber() == self.getClientOrderNumber(): - self.error('%s: already used by existing ARImport.' 
% - 'ClientOrderNumber') - break - - # Verify Client Reference - for arimport in existing_arimports: - if arimport.UID == self.UID() \ - or not arimport.getClientReference(): - continue - arimport = arimport.getObject() - if arimport.getClientReference() == self.getClientReference(): - self.error('%s: already used by existing ARImport.' % - 'ClientReference') - break - - # getCCContacts has no value if object is not complete (eg during test) - if self.getCCContacts(): - cc_contacts = self.getCCContacts()[0] - contacts = [x for x in client.objectValues('Contact')] - contact_names = [c.Title() for c in contacts] - # validate Contact existence in this Client - for k in ['CCNamesReport', 'CCNamesInvoice']: - for val in cc_contacts[k]: - if val and val not in contact_names: - self.error('%s: value is invalid (%s)' % (k, val)) - else: - cc_contacts = {'CCNamesReport': [], - 'CCEmailsReport': [], - 'CCNamesInvoice': [], - 'CCEmailsInvoice': [] - } - # validate Contact existence in this Client - for k in ['CCEmailsReport', 'CCEmailsInvoice']: - for val in cc_contacts.get(k, []): - if val and not pu.validateSingleNormalizedEmailAddress(val): - self.error('%s: value is invalid (%s)' % (k, val)) - - def validate_samples(self): - """Scan through the SampleData values and make sure - that each one is correct - """ - - bsc = getToolByName(self, 'bika_setup_catalog') - keywords = bsc.uniqueValuesFor('getKeyword') - profiles = [] - for p in bsc(portal_type='AnalysisProfile'): - p = p.getObject() - profiles.append(p.Title()) - profiles.append(p.getProfileKey()) - - row_nr = 0 - for gridrow in self.getSampleData(): - row_nr += 1 - - # validate against sample and ar schemas - for k, v in gridrow.items(): - if k in ['Analysis', 'Profiles']: - break - if k in sample_schema: - try: - self.validate_against_schema( - sample_schema, row_nr, k, v) - continue - except ValueError as e: - self.error(e.message) - break - if k in ar_schema: - try: - self.validate_against_schema( - ar_schema, row_nr, k, v) - except ValueError as e: - self.error(e.message) - - an_cnt = 0 - for v in gridrow['Analyses']: - if v and v not in keywords: - self.error("Row %s: value is invalid (%s=%s)" % - ('Analysis keyword', row_nr, v)) - else: - an_cnt += 1 - for v in gridrow['Profiles']: - if v and v not in profiles: - self.error("Row %s: value is invalid (%s=%s)" % - ('Profile Title', row_nr, v)) - else: - an_cnt += 1 - if not an_cnt: - self.error("Row %s: No valid analyses or profiles" % row_nr) - - def validate_against_schema(self, schema, row_nr, fieldname, value): - """ - """ - field = schema[fieldname] - if field.type == 'boolean': - value = str(value).strip().lower() - return value - if field.type == 'reference': - value = str(value).strip() - if field.required and not value: - raise ValueError("Row %s: %s field requires a value" % ( - row_nr, fieldname)) - if not value: - return value - brains = self.lookup(field.allowed_types, UID=value) - if not brains: - raise ValueError("Row %s: value is invalid (%s=%s)" % ( - row_nr, fieldname, value)) - if field.multiValued: - return [b.UID for b in brains] if brains else [] - else: - return brains[0].UID if brains else None - if field.type == 'datetime': - try: - ulocalized_time(DateTime(value), long_format=True, - time_only=False, context=self) - except: - raise ValueError('Row %s: value is invalid (%s=%s)' % ( - row_nr, fieldname, value)) - return value - - def lookup(self, allowed_types, **kwargs): - """Lookup an object of type (allowed_types). kwargs is sent - directly to the catalog. 
- """ - at = getToolByName(self, 'archetype_tool') - for portal_type in allowed_types: - catalog = at.catalog_map.get(portal_type, [None])[0] - catalog = getToolByName(self, catalog) - kwargs['portal_type'] = portal_type - brains = catalog(**kwargs) - if brains: - return brains - - def get_row_services(self, row): - """Return a list of services which are referenced in Analyses. - values may be UID, Title or Keyword. - """ - bsc = getToolByName(self, 'bika_setup_catalog') - services = set() - for val in row.get('Analyses', []): - brains = bsc(portal_type='AnalysisService', getKeyword=val) - if not brains: - brains = bsc(portal_type='AnalysisService', title=val) - if not brains: - brains = bsc(portal_type='AnalysisService', UID=val) - if brains: - services.add(brains[0].UID) - else: - self.error("Invalid analysis specified: %s" % val) - return list(services) - - def get_row_profile_services(self, row): - """Return a list of services which are referenced in profiles - values may be UID, Title or ProfileKey. - """ - bsc = getToolByName(self, 'bika_setup_catalog') - services = set() - profiles = [x.getObject() for x in bsc(portal_type='AnalysisProfile')] - for val in row.get('Profiles', []): - objects = [x for x in profiles - if val in (x.getProfileKey(), x.UID(), x.Title())] - if objects: - for service in objects[0].getService(): - services.add(service.UID()) - else: - self.error("Invalid profile specified: %s" % val) - return list(services) - - def get_row_container(self, row): - """Return a sample container - """ - bsc = getToolByName(self, 'bika_setup_catalog') - val = row.get('Container', False) - if val: - brains = bsc(portal_type='Container', UID=row['Container']) - if brains: - brains[0].getObject() - brains = bsc(portal_type='ContainerType', UID=row['Container']) - if brains: - # XXX Cheating. The calculation of capacity vs. volume is not done. 
- return brains[0].getObject() - return None - - def get_row_profiles(self, row): - bsc = getToolByName(self, 'bika_setup_catalog') - profiles = [] - for profile_title in row.get('Profiles', []): - profile = bsc(portal_type='AnalysisProfile', title=profile_title) - profiles.append(profile) - return profiles - - def Vocabulary_SamplePoint(self): - vocabulary = CatalogVocabulary(self) - vocabulary.catalog = 'bika_setup_catalog' - folders = [self.bika_setup.bika_samplepoints] - if IClient.providedBy(self.aq_parent): - folders.append(self.aq_parent) - return vocabulary(allow_blank=True, portal_type='SamplePoint') - - def Vocabulary_SampleMatrix(self): - vocabulary = CatalogVocabulary(self) - vocabulary.catalog = 'bika_setup_catalog' - return vocabulary(allow_blank=True, portal_type='SampleMatrix') - - def Vocabulary_SampleType(self): - vocabulary = CatalogVocabulary(self) - vocabulary.catalog = 'bika_setup_catalog' - folders = [self.bika_setup.bika_sampletypes] - if IClient.providedBy(self.aq_parent): - folders.append(self.aq_parent) - return vocabulary(allow_blank=True, portal_type='SampleType') - - def Vocabulary_ContainerType(self): - vocabulary = CatalogVocabulary(self) - vocabulary.catalog = 'bika_setup_catalog' - return vocabulary(allow_blank=True, portal_type='ContainerType') - - def error(self, msg): - errors = list(self.getErrors()) - errors.append(msg) - self.setErrors(errors) - - -atapi.registerType(ARImport, PROJECTNAME) diff --git a/bika/lims/content/bikacache.py b/bika/lims/content/bikacache.py deleted file mode 100644 index 21114b8a6b..0000000000 --- a/bika/lims/content/bikacache.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# -# This file is part of SENAITE.CORE. -# -# SENAITE.CORE is free software: you can redistribute it and/or modify it under -# the terms of the GNU General Public License as published by the Free Software -# Foundation, version 2. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more -# details. -# -# You should have received a copy of the GNU General Public License along with -# this program; if not, write to the Free Software Foundation, Inc., 51 -# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. -# -# Copyright 2018-2020 by it's authors. -# Some rights reserved, see README and LICENSE. - -from zope.interface import implements -from Products.Archetypes import atapi -from Products.Archetypes.public import BaseContent -from bika.lims.content.bikaschema import BikaSchema -from bika.lims import bikaMessageFactory as _ -from bika.lims import config - - -schema = BikaSchema.copy() + atapi.Schema(( - #'Key' field is name of the Cache object, must be Unique - atapi.StringField('Key',default=''), - - # 'Value' is ID of the last created object. Must be inscreased before using. - atapi.StringField('Value',default='') - -)) - -schema['title'].widget.visible = False - -class BikaCache(BaseContent): - """ - BikaCache objects stores information about 'Last Created ID's of different - types. For each object type, there must be only one Cache object, and the ID - of its Last Created Object. - It is used to avoid querying whole catalog just to get the highest ID for any - kind of object. 
- """ - schema = schema - -# Activating the content type in Archetypes' internal types registry -atapi.registerType(BikaCache, config.PROJECTNAME) diff --git a/bika/lims/content/bikasetup.py b/bika/lims/content/bikasetup.py index d5464f86d8..46ea7ca27b 100644 --- a/bika/lims/content/bikasetup.py +++ b/bika/lims/content/bikasetup.py @@ -46,7 +46,6 @@ from bika.lims.browser.widgets import DurationWidget from bika.lims.browser.widgets import RecordsWidget from bika.lims.browser.widgets import RejectionSetupWidget -from bika.lims.config import ARIMPORT_OPTIONS from bika.lims.config import ATTACHMENT_OPTIONS from bika.lims.config import CURRENCIES from bika.lims.config import WEEKDAYS @@ -388,19 +387,6 @@ def getCounterTypes(self, instance=None): format='select', ) ), - LinesField( - 'ARImportOption', - schemata="Analyses", - vocabulary=ARIMPORT_OPTIONS, - widget=MultiSelectionWidget( - visible=False, - label=_("AR Import options"), - description=_( - "'Classic' indicates importing samples per sample and " - "analysis service selection. With 'Profiles', analysis profile keywords " - "are used to select multiple analysis services together"), - ) - ), StringField( 'ARAttachmentOption', schemata="Analyses", @@ -717,11 +703,6 @@ def getCounterTypes(self, instance=None): schemata="ID Server", default=[ { - 'form': 'AI-{seq:03d}', - 'portal_type': 'ARImport', - 'sequence_type': 'generated', - 'split_length': 1 - }, { 'form': 'B-{seq:03d}', 'portal_type': 'Batch', 'prefix': 'batch', diff --git a/bika/lims/content/client.py b/bika/lims/content/client.py index f6dc8f5853..030b3e46c4 100644 --- a/bika/lims/content/client.py +++ b/bika/lims/content/client.py @@ -39,7 +39,6 @@ from bika.lims.browser.fields import EmailsField from bika.lims.browser.widgets import ReferenceWidget from bika.lims.catalog.bikasetup_catalog import SETUP_CATALOG -from bika.lims.config import ARIMPORT_OPTIONS from bika.lims.config import DECIMAL_MARKS from bika.lims.config import PROJECTNAME from bika.lims.content.attachment import Attachment @@ -197,11 +196,6 @@ def getContactUIDForUser(self): if len(r) == 1: return r[0].UID - security.declarePublic("getARImportOptions") - - def getARImportOptions(self): - return ARIMPORT_OPTIONS - def getContacts(self, only_active=True): """Return an array containing the contacts from this Client """ diff --git a/bika/lims/content/invoicebatch.py b/bika/lims/content/invoicebatch.py deleted file mode 100644 index 1302ed4b6d..0000000000 --- a/bika/lims/content/invoicebatch.py +++ /dev/null @@ -1,34 +0,0 @@ -# -*- coding: utf-8 -*- -# -# This file is part of SENAITE.CORE. -# -# SENAITE.CORE is free software: you can redistribute it and/or modify it under -# the terms of the GNU General Public License as published by the Free Software -# Foundation, version 2. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more -# details. -# -# You should have received a copy of the GNU General Public License along with -# this program; if not, write to the Free Software Foundation, Inc., 51 -# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. -# -# Copyright 2018-2020 by it's authors. -# Some rights reserved, see README and LICENSE. 
- -from bika.lims.config import PROJECTNAME -from bika.lims.interfaces import IInvoiceBatch -from Products.Archetypes.public import registerType -from Products.Archetypes.public import BaseFolder -from zope.interface import implements - - -class InvoiceBatch(BaseFolder): - """REMOVE AFTER 1.3 - """ - implements(IInvoiceBatch) - - -registerType(InvoiceBatch, PROJECTNAME) diff --git a/bika/lims/content/invoicefolder.py b/bika/lims/content/invoicefolder.py deleted file mode 100644 index a24a33c925..0000000000 --- a/bika/lims/content/invoicefolder.py +++ /dev/null @@ -1,34 +0,0 @@ -# -*- coding: utf-8 -*- -# -# This file is part of SENAITE.CORE. -# -# SENAITE.CORE is free software: you can redistribute it and/or modify it under -# the terms of the GNU General Public License as published by the Free Software -# Foundation, version 2. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more -# details. -# -# You should have received a copy of the GNU General Public License along with -# this program; if not, write to the Free Software Foundation, Inc., 51 -# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. -# -# Copyright 2018-2020 by it's authors. -# Some rights reserved, see README and LICENSE. - -from bika.lims.config import PROJECTNAME -from bika.lims.interfaces import IInvoiceFolder -from Products.Archetypes import atapi -from Products.ATContentTypes.content import folder -from zope.interface import implements - - -class InvoiceFolder(folder.ATFolder): - """REMOVE AFTER 1.3 - """ - implements(IInvoiceFolder) - - -atapi.registerType(InvoiceFolder, PROJECTNAME) diff --git a/bika/lims/content/sample.py b/bika/lims/content/sample.py deleted file mode 100644 index 00cdc80010..0000000000 --- a/bika/lims/content/sample.py +++ /dev/null @@ -1,481 +0,0 @@ -# -*- coding: utf-8 -*- -# -# This file is part of SENAITE.CORE. -# -# SENAITE.CORE is free software: you can redistribute it and/or modify it under -# the terms of the GNU General Public License as published by the Free Software -# Foundation, version 2. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more -# details. -# -# You should have received a copy of the GNU General Public License along with -# this program; if not, write to the Free Software Foundation, Inc., 51 -# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. -# -# Copyright 2018-2020 by it's authors. -# Some rights reserved, see README and LICENSE. 
- -"""Sample represents a physical sample submitted for testing -""" - -from datetime import timedelta -from AccessControl import ClassSecurityInfo -from bika.lims import bikaMessageFactory as _ -from bika.lims.api import get_object_by_uid -from bika.lims.browser.fields.remarksfield import RemarksField -from bika.lims.browser.fields.uidreferencefield import get_backreferences -from bika.lims.utils import t, getUsers -from Products.ATExtensions.field import RecordsField -from bika.lims.browser.widgets.datetimewidget import DateTimeWidget -from bika.lims.browser.widgets import RejectionWidget -from bika.lims.browser.widgets import RemarksWidget -from bika.lims.config import PROJECTNAME -from bika.lims.content.bikaschema import BikaSchema -from bika.lims.interfaces import ISample -from Products.Archetypes import atapi -from Products.Archetypes.public import * -from Products.Archetypes.references import HoldingReference -from Products.ATContentTypes.lib.historyaware import HistoryAwareMixin -from Products.ATContentTypes.utils import DT2dt, dt2DT -from Products.CMFCore import permissions -from Products.CMFPlone.utils import safe_unicode -from zope.interface import implements - -from bika.lims.browser.fields import DateTimeField -from bika.lims.browser.widgets import ReferenceWidget -from bika.lims.browser.widgets import SelectionWidget as BikaSelectionWidget - -import sys -from bika.lims.utils import to_unicode -from bika.lims.interfaces import IDoNotSupportSnapshots - -schema = BikaSchema.copy() + Schema(( - # TODO This field is only for v1.3.0 migration purposes - # bika_catalog contains an "isValid" index. We will take advantage of this - # index to keep track of the Samples that have been migrated already in - # order to prevent an unnecessary reimport when v1.3.0 is rerun. 
- # This field is used by `isValid` function - BooleanField('Migrated', - default = False, - ), - StringField('SampleID', - required=1, - searchable=True, - mode="rw", - read_permission=permissions.View, - write_permission=permissions.ModifyPortalContent, - widget=StringWidget( - label=_("Sample ID"), - description=_("The ID assigned to the client's sample by the lab"), - visible=False, - render_own_label=True, - ), - ), - StringField('ClientReference', - mode="rw", - read_permission=permissions.View, - write_permission=permissions.ModifyPortalContent, - widget=StringWidget( - label=_("Client Reference"), - visible=False, - render_own_label=True, - ), - ), - StringField('ClientSampleID', - mode="rw", - read_permission=permissions.View, - write_permission=permissions.ModifyPortalContent, - widget=StringWidget( - label=_("Client SID"), - visible=False, - render_own_label=True, - ), - ), - ReferenceField('SampleType', - required=1, - vocabulary_display_path_bound=sys.maxsize, - allowed_types=('SampleType',), - relationship='SampleSampleType', - referenceClass=HoldingReference, - mode="rw", - read_permission=permissions.View, - write_permission=permissions.ModifyPortalContent, - widget=ReferenceWidget( - label=_("Sample Type"), - render_own_label=True, - visible=False, - catalog_name='bika_setup_catalog', - base_query={'is_active': True}, - showOn=True, - ), - ), - ComputedField('SampleTypeTitle', - expression="here.getSampleType() and here.getSampleType().Title() or ''", - widget=ComputedWidget( - visible=False, - ), - ), - ReferenceField('SamplePoint', - vocabulary_display_path_bound=sys.maxsize, - allowed_types=('SamplePoint',), - relationship = 'SampleSamplePoint', - referenceClass = HoldingReference, - mode="rw", - read_permission=permissions.View, - write_permission=permissions.ModifyPortalContent, - widget=ReferenceWidget( - label=_("Sample Point"), - render_own_label=True, - visible=False, - catalog_name='bika_setup_catalog', - base_query={'is_active': True}, - showOn=True, - ), - ), - ComputedField('SamplePointTitle', - expression = "here.getSamplePoint() and here.getSamplePoint().Title() or ''", - widget = ComputedWidget( - visible=False, - ), - ), - ReferenceField( - 'StorageLocation', - allowed_types='StorageLocation', - relationship='AnalysisRequestStorageLocation', - mode="rw", - read_permission=permissions.View, - write_permission=permissions.ModifyPortalContent, - widget=ReferenceWidget( - label=_("Storage Location"), - description=_("Location where sample is kept"), - size=20, - render_own_label=True, - visible=False, - catalog_name='bika_setup_catalog', - base_query={'is_active': True}, - showOn=True, - ), - ), - BooleanField('SamplingWorkflowEnabled', - default_method='getSamplingWorkflowEnabledDefault' - ), - DateTimeField('DateSampled', - mode="rw", - read_permission=permissions.View, - widget = DateTimeWidget( - label=_("Date Sampled"), - show_time=True, - size=20, - visible=False, - render_own_label=True, - ), - ), - StringField('Sampler', - mode="rw", - read_permission=permissions.View, - vocabulary='getSamplers', - widget=BikaSelectionWidget( - format='select', - label=_("Sampler"), - visible=False, - render_own_label=True, - ), - ), - StringField('ScheduledSamplingSampler', - mode="rw", - read_permission=permissions.View, - vocabulary='getSamplers', - widget=BikaSelectionWidget( - description=_("Define the sampler supposed to do the sample in " - "the scheduled date"), - format='select', - label=_("Sampler for scheduled sampling"), - visible=False, - 
render_own_label=True, - ), - ), - DateTimeField('SamplingDate', - mode="rw", - read_permission=permissions.View, - write_permission=permissions.ModifyPortalContent, - widget = DateTimeWidget( - label=_("Expected Sampling Date"), - description=_("Define when the sampler has to take the samples"), - show_time=True, - visible=False, - render_own_label=True, - ), - ), - ReferenceField('SamplingDeviation', - vocabulary_display_path_bound = sys.maxsize, - allowed_types = ('SamplingDeviation',), - relationship = 'SampleSamplingDeviation', - referenceClass = HoldingReference, - mode="rw", - read_permission=permissions.View, - write_permission=permissions.ModifyPortalContent, - widget=ReferenceWidget( - label=_("Sampling Deviation"), - render_own_label=True, - visible=False, - catalog_name='bika_setup_catalog', - base_query={'is_active': True}, - showOn=True, - ), - ), - ReferenceField('SampleCondition', - vocabulary_display_path_bound = sys.maxsize, - allowed_types = ('SampleCondition',), - relationship = 'SampleSampleCondition', - referenceClass = HoldingReference, - mode="rw", - read_permission=permissions.View, - write_permission=permissions.ModifyPortalContent, - widget=ReferenceWidget( - label=_("Sample Condition"), - render_own_label=True, - visible=False, - catalog_name='bika_setup_catalog', - base_query={'is_active': True}, - showOn=True, - ), - ), - StringField( - 'EnvironmentalConditions', - mode="rw", - read_permission=permissions.View, - write_permission=permissions.ModifyPortalContent, - widget=StringWidget( - label=_("Environmental Conditions"), - visible=False, - render_own_label=True, - size=20, - ), - ), - # Another way to obtain a transition date is using getTransitionDate - # function. We are using a DateTimeField/Widget here because in some - # cases the user may want to change the Received Date. - # AnalysisRequest and Sample's DateReceived fields needn't to have - # the same value. - # This field is updated in workflow_script_receive method. 
- DateTimeField('DateReceived', - mode="rw", - read_permission=permissions.View, - write_permission=permissions.ModifyPortalContent, - widget = DateTimeWidget( - label=_("Date Received"), - show_time=True, - datepicker_nofuture=1, - visible=False, - render_own_label=True, - ), - ), - ComputedField('ClientUID', - expression = 'context.aq_parent.UID()', - widget = ComputedWidget( - visible=False, - ), - ), - ComputedField('SampleTypeUID', - expression='context.getSampleType() and \ - context.getSampleType().UID() or None', - widget=ComputedWidget( - visible=False, - ), - ), - ComputedField('SamplePointUID', - expression = 'context.getSamplePoint() and context.getSamplePoint().UID() or None', - widget = ComputedWidget( - visible=False, - ), - ), - BooleanField('Composite', - default = False, - mode="rw", - read_permission=permissions.View, - write_permission=permissions.ModifyPortalContent, - widget = BooleanWidget( - label=_("Composite"), - visible=False, - render_own_label=True, - ), - ), - DateTimeField('DateExpired', - mode="rw", - read_permission=permissions.View, - write_permission=permissions.ModifyPortalContent, - widget = DateTimeWidget( - label=_("Date Expired"), - visible=False, - render_own_label=True, - ), - ), - ComputedField('DisposalDate', - expression = 'context.disposal_date()', - widget=DateTimeWidget( - visible=False, - render_own_label=True, - ), - ), - DateTimeField('DateDisposed', - mode="rw", - read_permission=permissions.View, - write_permission=permissions.ModifyPortalContent, - widget = DateTimeWidget( - label=_("Date Disposed"), - visible=False, - render_own_label=True, - ), - ), - BooleanField('AdHoc', - default=False, - mode="rw", - read_permission=permissions.View, - write_permission=permissions.ModifyPortalContent, - widget=BooleanWidget( - label=_("Ad-Hoc"), - visible=False, - render_own_label=True, - ), - ), - RemarksField( - 'Remarks', - widget=RemarksWidget( - label=_("Remarks"), - ), - ), - RecordsField( - 'RejectionReasons', - widget = RejectionWidget( - label=_("Sample Rejection"), - description = _("Set the Sample Rejection workflow and the reasons"), - render_own_label=False, - visible=False, - ), - ), -)) - - -schema['title'].required = False - - -class Sample(BaseFolder, HistoryAwareMixin): - implements(ISample, IDoNotSupportSnapshots) - security = ClassSecurityInfo() - displayContentsTab = False - schema = schema - - _at_rename_after_creation = True - - def _renameAfterCreation(self, check_auto_id=False): - from bika.lims.idserver import renameAfterCreation - renameAfterCreation(self) - - def _getCatalogTool(self): - from bika.lims.catalog import getCatalog - return getCatalog(self) - - def getSampleID(self): - """ Return the Sample ID as title """ - return safe_unicode(self.getId()).encode('utf-8') - - def Title(self): - """ Return the Sample ID as title """ - return self.getSampleID() - - def getSamplingWorkflowEnabledDefault(self): - return self.bika_setup.getSamplingWorkflowEnabled() - - def getContactTitle(self): - return "" - - def getClientTitle(self): - proxies = self.getAnalysisRequests() - if not proxies: - return "" - value = proxies[0].aq_parent.Title() - return value - - def getProfilesTitle(self): - return "" - - def getAnalysisService(self): - analyses = [] - for ar in self.getAnalysisRequests(): - analyses += list(ar.getAnalyses(full_objects=True)) - value = [] - for analysis in analyses: - val = analysis.Title() - if val not in value: - value.append(val) - return value - - def getAnalysts(self): - analyses = [] - for ar in 
self.getAnalysisRequests(): - analyses += list(ar.getAnalyses(full_objects=True)) - value = [] - for analysis in analyses: - val = analysis.getAnalyst() - if val not in value: - value.append(val) - return value - - security.declarePublic('getAnalysisRequests') - - def getAnalysisRequests(self): - backrefs = get_backreferences(self, 'AnalysisRequestSample') - ars = map(get_object_by_uid, backrefs) - return ars - - security.declarePublic('getAnalyses') - - def getAnalyses(self, contentFilter=None, **kwargs): - """ return list of all analyses against this sample - """ - # contentFilter and kwargs are combined. They both exist for - # compatibility between the two signatures; kwargs has been added - # to be compatible with how getAnalyses() is used everywhere else. - cf = contentFilter if contentFilter else {} - cf.update(kwargs) - analyses = [] - for ar in self.getAnalysisRequests(): - analyses.extend(ar.getAnalyses(**cf)) - return analyses - - def getSamplers(self): - return getUsers(self, ['Sampler', ]) - - def disposal_date(self): - """Returns the date the retention period ends for this sample based on - the retention period from the Sample Type. If the sample hasn't been - collected yet, returns None - """ - date_sampled = self.getDateSampled() - if not date_sampled: - return None - - # TODO Preservation - preservation's retention period has priority over - # sample type's preservation period - - retention_period = self.getSampleType().getRetentionPeriod() or {} - retention_period_delta = timedelta( - days=int(retention_period.get("days", 0)), - hours=int(retention_period.get("hours", 0)), - minutes=int(retention_period.get("minutes", 0)) - ) - return dt2DT(DT2dt(date_sampled) + retention_period_delta) - - - # TODO This method is only for v1.3.0 migration purposes - # bika_catalog contains an "isValid" index. We will take advantage of this - # index to keep track of the Samples that have been migrated already in - # order to prevent an unnecessary reimport when v1.3.0 is rerun. - def isValid(self): - return self.getMigrated() - - -atapi.registerType(Sample, PROJECTNAME) diff --git a/bika/lims/content/samplepartition.py b/bika/lims/content/samplepartition.py deleted file mode 100644 index 3df9a60d18..0000000000 --- a/bika/lims/content/samplepartition.py +++ /dev/null @@ -1,91 +0,0 @@ -# -*- coding: utf-8 -*- -# -# This file is part of SENAITE.CORE. -# -# SENAITE.CORE is free software: you can redistribute it and/or modify it under -# the terms of the GNU General Public License as published by the Free Software -# Foundation, version 2. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more -# details. -# -# You should have received a copy of the GNU General Public License along with -# this program; if not, write to the Free Software Foundation, Inc., 51 -# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. -# -# Copyright 2018-2020 by it's authors. -# Some rights reserved, see README and LICENSE. 
- - -from AccessControl import ClassSecurityInfo -from Products.ATContentTypes.lib.historyaware import HistoryAwareMixin -from Products.Archetypes.public import BaseContent -from Products.Archetypes.public import BooleanField -from Products.Archetypes.public import DateTimeField -from Products.Archetypes.public import ReferenceField -from Products.Archetypes.public import Schema -from Products.Archetypes.public import StringField -from Products.Archetypes.public import registerType -from Products.CMFPlone.utils import safe_unicode -from bika.lims.browser.fields import DurationField -from bika.lims.browser.fields import UIDReferenceField -from bika.lims.config import PROJECTNAME -from bika.lims.content.bikaschema import BikaSchema -from bika.lims.interfaces import ISamplePartition -from zope.interface import implements -from bika.lims.interfaces import IDoNotSupportSnapshots - -schema = BikaSchema.copy() + Schema(( - ReferenceField('Container', - allowed_types=('Container',), - relationship='SamplePartitionContainer', - required=1, - multiValued=0, - ), - ReferenceField('Preservation', - allowed_types=('Preservation',), - relationship='SamplePartitionPreservation', - required=0, - multiValued=0, - ), - BooleanField('Separate', - default=False - ), - UIDReferenceField('Analyses', - allowed_types=('Analysis',), - required=0, - multiValued=1, - ), - DateTimeField('DatePreserved', - ), - StringField('Preserver', - searchable=True - ), - DurationField('RetentionPeriod', - ), -) -) - -schema['title'].required = False - - -class SamplePartition(BaseContent, HistoryAwareMixin): - implements(ISamplePartition, IDoNotSupportSnapshots) - security = ClassSecurityInfo() - displayContentsTab = False - schema = schema - - _at_rename_after_creation = True - - def _renameAfterCreation(self, check_auto_id=False): - from bika.lims.idserver import renameAfterCreation - renameAfterCreation(self) - - def Title(self): - """ Return the Sample ID as title """ - return safe_unicode(self.getId()).encode('utf-8') - - -registerType(SamplePartition, PROJECTNAME) diff --git a/bika/lims/content/samplesfolder.py b/bika/lims/content/samplesfolder.py deleted file mode 100644 index f134f9e49f..0000000000 --- a/bika/lims/content/samplesfolder.py +++ /dev/null @@ -1,46 +0,0 @@ -# -*- coding: utf-8 -*- -# -# This file is part of SENAITE.CORE. -# -# SENAITE.CORE is free software: you can redistribute it and/or modify it under -# the terms of the GNU General Public License as published by the Free Software -# Foundation, version 2. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more -# details. -# -# You should have received a copy of the GNU General Public License along with -# this program; if not, write to the Free Software Foundation, Inc., 51 -# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. -# -# Copyright 2018-2020 by it's authors. -# Some rights reserved, see README and LICENSE. - -"""SamplesFolder is a fake folder to live in the nav bar. It has -view from browser/sample.py/SamplesView wired to it. 
-""" -from Products.ATContentTypes.content import schemata -from Products.Archetypes import atapi -from Products.CMFCore import permissions -from Products.CMFCore.utils import getToolByName -from bika.lims.config import PROJECTNAME -from AccessControl import ClassSecurityInfo -from bika.lims.interfaces import ISamplesFolder, IHaveNoBreadCrumbs -from plone.app.folder import folder -from zope.interface import implements -from bika.lims import bikaMessageFactory as _ -from bika.lims.utils import t - -schema = folder.ATFolderSchema.copy() - -class SamplesFolder(folder.ATFolder): - implements(ISamplesFolder, IHaveNoBreadCrumbs) - displayContentsTab = False - schema = schema - security = ClassSecurityInfo() - -schemata.finalizeATCTSchema(schema, folderish = True, moveDiscussion = False) - -atapi.registerType(SamplesFolder, PROJECTNAME) diff --git a/bika/lims/interfaces/__init__.py b/bika/lims/interfaces/__init__.py index 78cb9e5ad0..fde2a5415c 100644 --- a/bika/lims/interfaces/__init__.py +++ b/bika/lims/interfaces/__init__.py @@ -194,11 +194,6 @@ class IReportFolder(Interface): """Report folder """ -# TODO Remove in >v1.3.0 -class ISample(Interface): - """Sample - """ - class ISampleCondition(Interface): """Sample Condition @@ -220,16 +215,6 @@ class ISampleMatrices(Interface): """ -class ISamplePartition(Interface): - """Sample - """ - - -class ISamplesFolder(Interface): - """Samples Folder - """ - - class ISamplingDeviation(Interface): """Sampling Deviation """ @@ -270,16 +255,6 @@ class IInvoice(Interface): """ -class IInvoiceBatch(Interface): - """Invoice Batch - """ - - -class IInvoiceFolder(Interface): - """Invoices Folder - """ - - class IBikaSetup(Interface): """Marker interface for the LIMS Setup """ @@ -745,16 +720,6 @@ class ISetupDataImporter(Interface): """ -class IARImportFolder(Interface): - """Marker interface for a folder that contains ARImports - """ - - -class IARImport(Interface): - """Marker interface for an ARImport - """ - - class IPricelist(Interface): """Folder view marker for Pricelist """ diff --git a/bika/lims/monkey/contentmenu.py b/bika/lims/monkey/contentmenu.py index fadfc532c2..f830799c90 100644 --- a/bika/lims/monkey/contentmenu.py +++ b/bika/lims/monkey/contentmenu.py @@ -23,7 +23,6 @@ def contentmenu_factories_available(self): """ if hasattr(self._addContext(), 'portal_type') \ and self._addContext().portal_type in [ - 'ARImport', 'Batch', 'Client', 'AnalysisRequest', diff --git a/bika/lims/permissions.py b/bika/lims/permissions.py index 9300c65cfd..73338f2439 100644 --- a/bika/lims/permissions.py +++ b/bika/lims/permissions.py @@ -181,7 +181,6 @@ EditWorksheet = "senaite.core: Edit Worksheet" ManageBika = "senaite.core: Manage Bika" ManageAnalysisRequests = "senaite.core: Manage Analysis Requests" -ManageARImport = "senaite.core: Manage ARImport" ManageInvoices = "senaite.core: Manage Invoices" ManageLoginDetails = "senaite.core: Manage Login Details" ManageReference = "senaite.core: Manage Reference" diff --git a/bika/lims/permissions.zcml b/bika/lims/permissions.zcml index fd54bde24f..d74e03db6b 100644 --- a/bika/lims/permissions.zcml +++ b/bika/lims/permissions.zcml @@ -152,7 +152,6 @@ - diff --git a/bika/lims/profiles/default/factorytool.xml b/bika/lims/profiles/default/factorytool.xml index fe96653ce9..83545eae20 100644 --- a/bika/lims/profiles/default/factorytool.xml +++ b/bika/lims/profiles/default/factorytool.xml @@ -1,7 +1,6 @@ - @@ -31,8 +30,6 @@ - - @@ -43,7 +40,6 @@ - diff --git a/bika/lims/profiles/default/propertiestool.xml 
b/bika/lims/profiles/default/propertiestool.xml index b687c799e4..ac55c65ebe 100644 --- a/bika/lims/profiles/default/propertiestool.xml +++ b/bika/lims/profiles/default/propertiestool.xml @@ -37,7 +37,6 @@ - @@ -60,7 +59,6 @@ - @@ -76,8 +74,6 @@ - - @@ -104,7 +100,6 @@ - False diff --git a/bika/lims/profiles/default/rolemap.xml b/bika/lims/profiles/default/rolemap.xml index f9eaef33e7..76c86aa8e9 100644 --- a/bika/lims/profiles/default/rolemap.xml +++ b/bika/lims/profiles/default/rolemap.xml @@ -831,11 +831,6 @@ - - - - - diff --git a/bika/lims/profiles/default/types.xml b/bika/lims/profiles/default/types.xml index 2c3121765f..bb8b017c30 100644 --- a/bika/lims/profiles/default/types.xml +++ b/bika/lims/profiles/default/types.xml @@ -2,7 +2,6 @@ Controls the available content types in your portal - @@ -24,7 +23,6 @@ - @@ -50,8 +48,6 @@ - - @@ -76,17 +72,14 @@ - - - diff --git a/bika/lims/profiles/default/types/ARImport.xml b/bika/lims/profiles/default/types/ARImport.xml deleted file mode 100644 index facf8b8055..0000000000 --- a/bika/lims/profiles/default/types/ARImport.xml +++ /dev/null @@ -1,48 +0,0 @@ - - - ARImport - - ++resource++bika.lims.images/arimport.png - ARImport - bika.lims - addARImport - False - True - - - False - arimport_view - - - - - - - - - - - - - - diff --git a/bika/lims/profiles/default/types/BikaCache.xml b/bika/lims/profiles/default/types/BikaCache.xml deleted file mode 100644 index ad260a6042..0000000000 --- a/bika/lims/profiles/default/types/BikaCache.xml +++ /dev/null @@ -1,15 +0,0 @@ - - - BikaCache - A contenttype to keep some necessary variables - BikaCache - bika.lims - addBikaCache - True - True - True - - diff --git a/bika/lims/profiles/default/types/Client.xml b/bika/lims/profiles/default/types/Client.xml index ec6dca0c6b..4507ff3640 100644 --- a/bika/lims/profiles/default/types/Client.xml +++ b/bika/lims/profiles/default/types/Client.xml @@ -16,7 +16,6 @@ False True - @@ -85,18 +84,6 @@ - - - - - - - ++resource++bika.lims.images/invoice.png - InvoiceBatch - bika.lims - addInvoiceBatch - - - False - True - - - - False - False - base_view - - - - - - - - - - - - - - - - - - - diff --git a/bika/lims/profiles/default/types/InvoiceFolder.xml b/bika/lims/profiles/default/types/InvoiceFolder.xml deleted file mode 100644 index 0aacd64f90..0000000000 --- a/bika/lims/profiles/default/types/InvoiceFolder.xml +++ /dev/null @@ -1,26 +0,0 @@ - - - Statements - - ++resource++bika.lims.images/invoice.png - InvoiceFolder - bika.lims - addInvoiceFolder - - - False - True - - - - False - False - - - - - diff --git a/bika/lims/profiles/default/types/Sample.xml b/bika/lims/profiles/default/types/Sample.xml deleted file mode 100644 index 63a8c2e76c..0000000000 --- a/bika/lims/profiles/default/types/Sample.xml +++ /dev/null @@ -1,25 +0,0 @@ - - - Sample - - ++resource++bika.lims.images/sample.png - Sample - bika.lims - addSample - - - False - True - - - - False - False - base_view - - - diff --git a/bika/lims/profiles/default/types/SamplePartition.xml b/bika/lims/profiles/default/types/SamplePartition.xml deleted file mode 100644 index 166bed5a88..0000000000 --- a/bika/lims/profiles/default/types/SamplePartition.xml +++ /dev/null @@ -1,23 +0,0 @@ - - - Sample Partition - - ++resource++bika.lims.images/samplepartition.png - SamplePartition - bika.lims - addSamplePartition - - - False - True - - False - False - base_view - - - diff --git a/bika/lims/profiles/default/types/SamplesFolder.xml b/bika/lims/profiles/default/types/SamplesFolder.xml deleted file mode 100644 index 
12e2455474..0000000000 --- a/bika/lims/profiles/default/types/SamplesFolder.xml +++ /dev/null @@ -1,36 +0,0 @@ - - - SamplesFolder - - ++resource++bika.lims.images/sample.png - SamplesFolder - bika.lims - addSamplesFolder - - - False - True - - False - False - - - - - - - - - - diff --git a/bika/lims/profiles/default/workflows.xml b/bika/lims/profiles/default/workflows.xml index e27d2369a9..7555060146 100644 --- a/bika/lims/profiles/default/workflows.xml +++ b/bika/lims/profiles/default/workflows.xml @@ -4,7 +4,6 @@ name="title">Contains workflow definitions for your portal - @@ -435,12 +434,6 @@ - - - - - - diff --git a/bika/lims/profiles/default/workflows/bika_arimport_workflow/definition.xml b/bika/lims/profiles/default/workflows/bika_arimport_workflow/definition.xml deleted file mode 100644 index 731ae02331..0000000000 --- a/bika/lims/profiles/default/workflows/bika_arimport_workflow/definition.xml +++ /dev/null @@ -1,116 +0,0 @@ - - - - Modify portal content - senaite.core: Manage ARImport - - - - - LabClerk - LabManager - Manager - - - LabClerk - LabManager - Manager - - - - - - - LabClerk - LabManager - Manager - - - LabClerk - LabManager - Manager - - - - - - - LabClerk - LabManager - Manager - - - LabClerk - LabManager - Manager - - - - - Import - - senaite.core: Manage ARImport - - - - - Validate - - senaite.core: Manage ARImport - python:here.guard_validate_transition() - - - - - Previous transition - - transition/getId|nothing - - - - - - - The ID of the user who performed the last transition - - user/getId - - - - - - - Comment about the last transition - - python:state_change.kwargs.get('comment', '') - - - - - - - Provides access to workflow history - - state_change/getHistory - - - - - - - When the previous transition was performed - - state_change/getDateTime - - - - - - diff --git a/bika/lims/profiles/default/workflows/bika_arimports_workflow/definition.xml b/bika/lims/profiles/default/workflows/bika_arimports_workflow/definition.xml deleted file mode 100644 index 98b9bd1304..0000000000 --- a/bika/lims/profiles/default/workflows/bika_arimports_workflow/definition.xml +++ /dev/null @@ -1,100 +0,0 @@ - - - - senaite.core: Manage ARImport - List folder contents - Add portal content - Delete objects - View - - - - - - - - - LabClerk - LabManager - Manager - - - - - Member - Manager - LabManager - - - - - Manager - LabManager - - - - - Manager - - - - - Member - Manager - - - - - - Previous transition - - transition/getId|nothing - - - - - - - The ID of the user who performed the last transition - - user/getId - - - - - - - Comment about the last transition - - python:state_change.kwargs.get('comment', '') - - - - - - - Provides access to workflow history - - state_change/getHistory - - - - - - - When the previous transition was performed - - state_change/getDateTime - - - - - - diff --git a/bika/lims/setuphandlers.py b/bika/lims/setuphandlers.py index 93b58bb692..277ee69cf5 100644 --- a/bika/lims/setuphandlers.py +++ b/bika/lims/setuphandlers.py @@ -79,7 +79,6 @@ NAV_BAR_ITEMS_TO_HIDE = ( # List of items to hide from navigation bar - "arimports", "pricelists", "supplyorders", ) diff --git a/bika/lims/skins/bika/cg-images/accept.png b/bika/lims/skins/bika/cg-images/accept.png deleted file mode 100755 index 89c8129a49..0000000000 Binary files a/bika/lims/skins/bika/cg-images/accept.png and /dev/null differ diff --git a/bika/lims/skins/bika/cg-images/exclamation.png b/bika/lims/skins/bika/cg-images/exclamation.png deleted file mode 100755 index c37bd062e6..0000000000 Binary 
files a/bika/lims/skins/bika/cg-images/exclamation.png and /dev/null differ diff --git a/bika/lims/skins/bika/cg-images/loading.gif b/bika/lims/skins/bika/cg-images/loading.gif deleted file mode 100755 index e2dda32042..0000000000 Binary files a/bika/lims/skins/bika/cg-images/loading.gif and /dev/null differ diff --git a/bika/lims/skins/bika/cg-images/magnifier.png b/bika/lims/skins/bika/cg-images/magnifier.png deleted file mode 100755 index 8c2b5d8631..0000000000 Binary files a/bika/lims/skins/bika/cg-images/magnifier.png and /dev/null differ diff --git a/bika/lims/tests/doctests/API_analysis.rst b/bika/lims/tests/doctests/API_analysis.rst index d699a012b9..3524dddd07 100644 --- a/bika/lims/tests/doctests/API_analysis.rst +++ b/bika/lims/tests/doctests/API_analysis.rst @@ -20,7 +20,6 @@ Needed Imports: >>> from bika.lims.api.analysis import get_formatted_interval >>> from bika.lims.api.analysis import is_out_of_range >>> from bika.lims.content.analysisrequest import AnalysisRequest - >>> from bika.lims.content.sample import Sample >>> from bika.lims.utils.analysisrequest import create_analysisrequest >>> from bika.lims.utils import tmpID >>> from bika.lims.workflow import doActionFor diff --git a/bika/lims/tests/doctests/AnalysisTurnaroundTime.rst b/bika/lims/tests/doctests/AnalysisTurnaroundTime.rst index a0201fe147..e48373f4e4 100644 --- a/bika/lims/tests/doctests/AnalysisTurnaroundTime.rst +++ b/bika/lims/tests/doctests/AnalysisTurnaroundTime.rst @@ -17,7 +17,6 @@ Needed Imports: >>> from bika.lims.api.analysis import get_formatted_interval >>> from bika.lims.api.analysis import is_out_of_range >>> from bika.lims.content.analysisrequest import AnalysisRequest - >>> from bika.lims.content.sample import Sample >>> from bika.lims.utils.analysisrequest import create_analysisrequest >>> from bika.lims.utils import tmpID >>> from bika.lims.workflow import doActionFor diff --git a/bika/lims/tests/doctests/WorksheetApplyTemplate.rst b/bika/lims/tests/doctests/WorksheetApplyTemplate.rst index 82c9bbc4c7..ebe59b3bc9 100644 --- a/bika/lims/tests/doctests/WorksheetApplyTemplate.rst +++ b/bika/lims/tests/doctests/WorksheetApplyTemplate.rst @@ -33,7 +33,6 @@ Needed Imports: >>> from AccessControl.PermissionRole import rolesForPermissionOn >>> from bika.lims import api >>> from bika.lims.content.analysisrequest import AnalysisRequest - >>> from bika.lims.content.sample import Sample >>> from bika.lims.utils.analysisrequest import create_analysisrequest >>> from bika.lims.utils import tmpID >>> from bika.lims.workflow import doActionFor diff --git a/bika/lims/tests/files/BikaARImportTemplate.csv b/bika/lims/tests/files/BikaARImportTemplate.csv deleted file mode 100644 index a4b17c39ca..0000000000 --- a/bika/lims/tests/files/BikaARImportTemplate.csv +++ /dev/null @@ -1,18 +0,0 @@ -Header,File name,Client name,Client ID,Contact,CC Names - Report,CC Emails - Report,CC Names - Invoice,CC Emails - Invoice,Client Order Number,Client Reference,No of Samples,,,,,,,,,,,,,,,,,,,, -Header Data,BikaARImportTemplate,Happy Hills,HH,Rita Mohale,,,,,,,10,,,,,,,,,,,,,,,,,,,, -Batch Header,title,description,ClientBatchID,ClientBatchComment,BatchLabels,ReturnSampleToClient,,,,,,,,,,,,,,,,,,,,,,,,, -Batch Data,Happy Hills Survey 13,,CC 201506,Routine monthly safety measure,Commercial,1,,,,,,,,,,,,,,,,,,,,,,,,, -Samples,ClientSampleID,SamplingDate,DateSampled,SamplePoint,Activity SampledĀ ,Amount Sampled,Metric,SampleMatrix,SampleType,ContainerType,ReportDryMatter,Priority,Total number of Analyses or Profiles,Price excl 
Tax,PTM,TH,Mbcheck,Mbcount,Temp,Test,Ca,Enterocnt,pHField,Conductivity,Zn,pH,Ecoli,Fe,Entero,THCaCO3,Cu -Analysis price,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -Total Analyses or Profiles,,,,,,,,,,,,,36,,2,2,4,2,3,1,3,2,2,2,1,4,2,2,2,1,1 -Total price excl Tax,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -Sample 1,HHS14001,3/9/2014,3/9/2014,Rooikloof Farm,Drilling,1.05,l,Animal feed,Canola,Glass Bottle 500ml,0,Normal,7,,1,,1,,1,,,1,,1,,1,,,1,, -Sample 2,HHS14002,3/9/2014,3/9/2014,Dispatch,Drilling,1.1,l,Water,Lucerne,Canvas bag,0,Normal,3,,,1,,,,,,,,,,1,,,,1, -Sample 3,HHS14003,3/9/2014,3/9/2014,Mixer,Drilling,0.95,l,Fiber,Canola,Micro Glass Bottle 500ml,0,Normal,3,,,,1,,,,,,,,,1,1,,,, -Sample 4,HHS14004,3/9/2014,3/9/2014,Packaging,Drilling,1.03,l,Fiber,Sunflower,Canvas bag,0,Normal,4,,,,,1,1,,1,,,1,,,,,,, -Sample 5,HHS14005,3/9/2014,3/9/2014,Packaging,Drilling,1.08,l,Grain,Canola,Plastic bottle,0,Normal,2,,,,,,,,,,,,1,,,1,,, -Sample 6,HHS14006,3/9/2014,3/9/2014,Bruma Lake,Drilling,0.93,l,Industrial Effluent,Dust,Plastic bottle,0,Normal,4,,,,1,,1,1,,,,,,,1,,,, -Sample 7,HHS14007,3/9/2014,3/9/2014,Shop - BB Supermarket,Drilling,1.01,l,Animal feed,Bran,Plastic bottle,0,Normal,4,,1,,,,,,,,1,,,,,1,,,1 -Sample 8,HHS14008,3/9/2014,3/9/2014,Borehole 12,Drilling,1.06,l,Fiber,Water,Glass bottle,0,Normal,4,,,,1,,,,1,,,,,1,,,1,, -Sample 9,HHS14009,3/9/2014,3/9/2014,Dry stock,Drilling,0.91,l,Sewage,Sunflower,Glass bottle,0,Normal,2,,,,,1,,,1,,,,,,,,,, -Sample 10,HHS14010,3/9/2014,3/9/2014,Packaging,Drilling,0.99,l,Fiber,Soya,Glass Bottle 500ml,0,Normal,3,,,1,,,,,,1,1,,,,,,,, diff --git a/bika/lims/tests/test_ARImport.py b/bika/lims/tests/test_ARImport.py deleted file mode 100644 index d83ebbfacc..0000000000 --- a/bika/lims/tests/test_ARImport.py +++ /dev/null @@ -1,249 +0,0 @@ -# -*- coding: utf-8 -*- -# -# This file is part of SENAITE.CORE. -# -# SENAITE.CORE is free software: you can redistribute it and/or modify it under -# the terms of the GNU General Public License as published by the Free Software -# Foundation, version 2. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more -# details. -# -# You should have received a copy of the GNU General Public License along with -# this program; if not, write to the Free Software Foundation, Inc., 51 -# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. -# -# Copyright 2018-2020 by it's authors. -# Some rights reserved, see README and LICENSE. 
- -import re - -import transaction -from bika.lims.catalog import (CATALOG_ANALYSIS_LISTING, - CATALOG_ANALYSIS_REQUEST_LISTING) -from bika.lims.tests.base import BaseTestCase -from bika.lims.utils import tmpID -from bika.lims.workflow import doActionFor, getCurrentState -from plone.app.testing import (TEST_USER_ID, TEST_USER_NAME, - TEST_USER_PASSWORD, login, setRoles) -from Products.CMFCore.utils import getToolByName -from Products.CMFPlone.utils import _createObjectByType - -try: - import unittest2 as unittest -except ImportError: # Python 2.7 - import unittest - - -class TestARImports(BaseTestCase): - def addthing(self, folder, portal_type, **kwargs): - thing = _createObjectByType(portal_type, folder, tmpID()) - thing.unmarkCreationFlag() - thing.edit(**kwargs) - thing._renameAfterCreation() - return thing - - def setUp(self): - super(TestARImports, self).setUp() - setRoles(self.portal, TEST_USER_ID, ['Member', 'LabManager']) - login(self.portal, TEST_USER_NAME) - client = self.addthing( - self.portal.clients, 'Client', title='Happy Hills', ClientID='HH') - self.addthing( - client, 'Contact', Firstname='Rita Mohale', Lastname='Mohale') - self.addthing( - self.portal.bika_setup.bika_sampletypes, 'SampleType', - title='Water', Prefix='H2O') - self.addthing( - self.portal.bika_setup.bika_samplematrices, 'SampleMatrix', - title='Liquids') - self.addthing( - self.portal.bika_setup.bika_samplepoints, 'SamplePoint', - title='Toilet') - self.addthing( - self.portal.bika_setup.bika_containertypes, 'ContainerType', - title='Cup') - a = self.addthing( - self.portal.bika_setup.bika_analysisservices, 'AnalysisService', - title='Ecoli', Keyword="ECO") - b = self.addthing( - self.portal.bika_setup.bika_analysisservices, 'AnalysisService', - title='Salmonella', Keyword="SAL") - c = self.addthing( - self.portal.bika_setup.bika_analysisservices, 'AnalysisService', - title='Color', Keyword="COL") - d = self.addthing( - self.portal.bika_setup.bika_analysisservices, 'AnalysisService', - title='Taste', Keyword="TAS") - self.addthing( - self.portal.bika_setup.bika_analysisprofiles, 'AnalysisProfile', - title='MicroBio', Service=[a.UID(), b.UID()]) - self.addthing( - self.portal.bika_setup.bika_analysisprofiles, 'AnalysisProfile', - title='Properties', Service=[c.UID(), d.UID()]) - - def tearDown(self): - super(TestARImports, self).setUp() - login(self.portal, TEST_USER_NAME) - - def test_complete_valid_batch_import(self): - pc = getToolByName(self.portal, 'portal_catalog') - workflow = getToolByName(self.portal, 'portal_workflow') - client = self.portal.clients.objectValues()[0] - arimport = self.addthing(client, 'ARImport') - arimport.unmarkCreationFlag() - arimport.setFilename("test1.csv") - arimport.setOriginalFile(""" -Header, File name, Client name, Client ID, Contact, CC Names - Report, CC Emails - Report, CC Names - Invoice, CC Emails - Invoice, No of Samples, Client Order Number, Client Reference,, -Header Data, test1.csv, Happy Hills, HH, Rita Mohale, , , , , 10, HHPO-001, ,, -Batch Header, id, title, description, ClientBatchID, ClientBatchComment, BatchLabels, ReturnSampleToClient,,, -Batch Data, B15-0123, New Batch, Optional descr, CC 201506, Just a batch, , TRUE ,,, -Samples, ClientSampleID, SamplingDate,DateSampled,SamplePoint,SampleMatrix,SampleType,ContainerType,ReportDryMatter,Priority,Total number of Analyses or Profiles,Price excl Tax,ECO,SAL,COL,TAS,MicroBio,Properties -Analysis price,,,,,,,,,,,,,, -"Total Analyses or Profiles",,,,,,,,,,,,,9,,, -Total price excl Tax,,,,,,,,,,,,,, -"Sample 
1", HHS14001, 3/9/2014, 3/9/2014, Toilet, Liquids, Water, Cup, 0, Normal, 1, 0, 0,0,0,0,0,1 -"Sample 2", HHS14002, 3/9/2014, 3/9/2014, Toilet, Liquids, Water, Cup, 0, Normal, 2, 0, 0,0,0,0,1,1 -"Sample 3", HHS14002, 3/9/2014, 3/9/2014, Toilet, Liquids, Water, Cup, 0, Normal, 4, 0, 1,1,1,1,0,0 -"Sample 4", HHS14002, 3/9/2014, 3/9/2014, Toilet, Liquids, Water, Cup, 0, Normal, 2, 0, 1,0,0,0,1,0 - """) - - # check that values are saved without errors - arimport.setErrors([]) - arimport.save_header_data() - arimport.save_sample_data() - arimport.create_or_reference_batch() - errors = arimport.getErrors() - if errors: - self.fail("Unexpected errors while saving data: " + str(errors)) - # check that batch was created and linked to arimport without errors - if not pc(portal_type='Batch'): - self.fail("Batch was not created!") - if not arimport.schema['Batch'].get(arimport): - self.fail("Batch was created, but not linked to ARImport.") - - # the workflow scripts use response.write(); silence them - arimport.REQUEST.response.write = lambda x: x - - # check that validation succeeds without any errors - workflow.doActionFor(arimport, 'validate') - state = workflow.getInfoFor(arimport, 'review_state') - if state != 'valid': - errors = arimport.getErrors() - self.fail( - 'Validation failed! %s.Errors: %s' % (arimport.id, errors)) - - # Import objects and verify that they exist - workflow.doActionFor(arimport, 'import') - state = workflow.getInfoFor(arimport, 'review_state') - if state != 'imported': - errors = arimport.getErrors() - self.fail( - 'Importation failed! %s.Errors: %s' % (arimport.id, errors)) - - barc = getToolByName(self.portal, CATALOG_ANALYSIS_REQUEST_LISTING) - ars = barc(portal_type='AnalysisRequest') - if not ars[0].getObject().getContact(): - self.fail('No Contact imported into ar.Contact field.') - l = len(ars) - if l != 4: - self.fail('4 AnalysisRequests were not created! We found %s' % l) - bac = getToolByName(self.portal, CATALOG_ANALYSIS_LISTING) - analyses = bac(portal_type='Analysis') - l = len(analyses) - if l != 12: - self.fail('12 Analysis not found! 
We found %s' % l) - states = [workflow.getInfoFor(a.getObject(), 'review_state') - for a in analyses] - ars_states = [ar.review_state for ar in ars] - if ars_states != ['sample_due'] * 4: - self.fail('Samples states should all be sample_due, ' - 'but are not!') - if states != ['registered'] * 12: - self.fail('Analysis states should all be registered, but are not!') - - def test_LIMS_2080_correctly_interpret_false_and_blank_values(self): - client = self.portal.clients.objectValues()[0] - arimport = self.addthing(client, 'ARImport') - arimport.unmarkCreationFlag() - arimport.setFilename("test1.csv") - arimport.setOriginalFile(""" -Header, File name, Client name, Client ID, Contact, CC Names - Report, CC Emails - Report, CC Names - Invoice, CC Emails - Invoice, No of Samples, Client Order Number, Client Reference,, -Header Data, test1.csv, Happy Hills, HH, Rita Mohale, , , , , 10, HHPO-001, ,, -Samples, ClientSampleID, SamplingDate,DateSampled,SamplePoint,SampleMatrix,SampleType,ContainerType,ReportDryMatter,Priority,Total number of Analyses or Profiles,Price excl Tax,ECO,SAL,COL,TAS,MicroBio,Properties -Analysis price,,,,,,,,,,,,,, -"Total Analyses or Profiles",,,,,,,,,,,,,9,,, -Total price excl Tax,,,,,,,,,,,,,, -"Sample 1", HHS14001, 3/9/2014, 3/9/2014, , , Water, Cup, 0, Normal, 1, 0, 0,0,0,0,0,1 -"Sample 2", HHS14002, 3/9/2014, 3/9/2014, , , Water, Cup, 0, Normal, 2, 0, 0,0,0,0,1,1 -"Sample 3", HHS14002, 3/9/2014, 3/9/2014, Toilet, Liquids, Water, Cup, 1, Normal, 4, 0, 1,1,1,1,0,0 -"Sample 4", HHS14002, 3/9/2014, 3/9/2014, Toilet, Liquids, Water, Cup, 1, Normal, 2, 0, 1,0,0,0,1,0 - """) - - # check that values are saved without errors - arimport.setErrors([]) - arimport.save_header_data() - arimport.save_sample_data() - errors = arimport.getErrors() - if errors: - self.fail("Unexpected errors while saving data: " + str(errors)) - transaction.commit() - browser = self.getBrowser( - username=TEST_USER_NAME, - password=TEST_USER_PASSWORD, - loggedIn=True) - - doActionFor(arimport, 'validate') - c_state = getCurrentState(arimport) - self.assertTrue( - c_state == 'valid', - "ARrimport in 'invalid' state after it has been transitioned to " - "'valid'.") - browser.open(arimport.absolute_url() + "/edit") - content = browser.contents - re.match( - '', - content) - if len(re.findall('<.*selected.*Toilet', content)) != 2: - self.fail("Should be two empty SamplePoints, and two with values") - if len(re.findall('<.*selected.*Liquids', content)) != 2: - self.fail("Should be two empty Matrix fields, and two with values") - - def test_LIMS_2081_post_edit_fails_validation_gracefully(self): - client = self.portal.clients.objectValues()[0] - arimport = self.addthing(client, 'ARImport') - arimport.unmarkCreationFlag() - arimport.setFilename("test1.csv") - arimport.setOriginalFile(""" -Header, File name, Client name, Client ID, Contact, CC Names - Report, CC Emails - Report, CC Names - Invoice, CC Emails - Invoice, No of Samples, Client Order Number, Client Reference,, -Header Data, test1.csv, Happy Hills, HH, Rita Mohale, , , , , 10, HHPO-001, ,, -Samples, ClientSampleID, SamplingDate,DateSampled,SamplePoint,SampleMatrix,SampleType,ContainerType,ReportDryMatter,Priority,Total number of Analyses or Profiles,Price excl Tax,ECO,SAL,COL,TAS,MicroBio,Properties -Analysis price,,,,,,,,,,,,,, -"Total Analyses or Profiles",,,,,,,,,,,,,9,,, -Total price excl Tax,,,,,,,,,,,,,, -"Sample 1", HHS14001, 3/9/2014, 3/9/2014, , , Water, Cup, 0, Normal, 1, 0, 0,0,0,0,0,1 - """) - - # check that values are saved without 
errors - arimport.setErrors([]) - arimport.save_header_data() - arimport.save_sample_data() - arimport.create_or_reference_batch() - errors = arimport.getErrors() - if errors: - self.fail("Unexpected errors while saving data: " + str(errors)) - transaction.commit() - browser = self.getBrowser(loggedIn=True) - browser.open(arimport.absolute_url() + "/edit") - browser.getControl(name="ClientReference").value = 'test_reference' - browser.getControl(name="form.button.save").click() - if 'test_reference' not in browser.contents: - self.fail('Failed to modify ARImport object (Client Reference)') - - -def test_suite(): - suite = unittest.TestSuite() - suite.addTest(unittest.makeSuite(TestARImports)) - return suite diff --git a/bika/lims/upgrade/v01_03_003.py b/bika/lims/upgrade/v01_03_003.py index 6cd860f972..f0835eefdc 100644 --- a/bika/lims/upgrade/v01_03_003.py +++ b/bika/lims/upgrade/v01_03_003.py @@ -45,6 +45,19 @@ profile = "profile-{0}:default".format(product) +TYPES_TO_REMOVE = [ + "ARImport", + "SamplesFolder", + "BikaCache", + # invoices were removed in upgrade step 1.3.0 + "InvoiceBatch", + "InvoiceFolder", +] + +WFS_TO_REMOVE = [ + "bika_arimport_workflow", +] + JAVASCRIPTS_TO_REMOVE = [ # moved from lims -> core "++resource++senaite.lims.jquery.js/jquery-2.2.4.min.js", @@ -364,6 +377,10 @@ def upgrade(tool): # setup html filtering setup_html_filter(portal) + # remove stale type registrations + # https://github.com/senaite/senaite.core/pull/1530 + remove_stale_type_registrations(portal) + # Fix email addresses # https://github.com/senaite/senaite.core/pull/1542 fix_email_address(portal) @@ -779,6 +796,26 @@ def update_wf_received_samples(portal): logger.info("Updating workflow mappings for received samples [DONE]") +def remove_stale_type_registrations(portal): + """Remove stale contents from the portal_types + """ + logger.info("Removing stale type registrations ...") + + pt = portal.portal_types + for t in TYPES_TO_REMOVE: + logger.info("Removing type registrations for '{}'".format(t)) + if t in pt.objectIds(): + pt.manage_delObjects(t) + + wf_tool = portal.portal_workflow + for wf in WFS_TO_REMOVE: + if wf in wf_tool: + logger.info("Removing Workflow '{}'".format(wf)) + wf_tool.manage_delObjects(wf) + + logger.info("Removing stale type registrations [DONE]") + + def fix_email_address(portal, portal_types=None, catalog_id="portal_catalog"): """Validates the email address of portal types that inherit from Person. The field did not have an email validator, causing some views to fail when