-
-
-
diff --git a/bika/lims/config.py b/bika/lims/config.py
index 770da18755..b520cf53f6 100644
--- a/bika/lims/config.py
+++ b/bika/lims/config.py
@@ -88,11 +88,6 @@
('r', _('Render in Report')),
('i', _('Ignore in Report')),
))
-ARIMPORT_OPTIONS = DisplayList((
- ('c', _('Classic')),
- ('p', _('Profiles')),
- # ('s', _('Special')),
-))
GENDERS = DisplayList((
('male', _('Male')),
('female', _('Female')),
diff --git a/bika/lims/content/arimport.py b/bika/lims/content/arimport.py
deleted file mode 100644
index fc2f245156..0000000000
--- a/bika/lims/content/arimport.py
+++ /dev/null
@@ -1,964 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of SENAITE.CORE.
-#
-# SENAITE.CORE is free software: you can redistribute it and/or modify it under
-# the terms of the GNU General Public License as published by the Free Software
-# Foundation, version 2.
-#
-# This program is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
-# details.
-#
-# You should have received a copy of the GNU General Public License along with
-# this program; if not, write to the Free Software Foundation, Inc., 51
-# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-# Copyright 2018-2020 by it's authors.
-# Some rights reserved, see README and LICENSE.
-
-from AccessControl import ClassSecurityInfo
-import csv
-from copy import deepcopy
-from DateTime.DateTime import DateTime
-from Products.Archetypes.event import ObjectInitializedEvent
-from Products.CMFCore.WorkflowCore import WorkflowException
-from bika.lims import bikaMessageFactory as _
-from bika.lims.browser import ulocalized_time
-from bika.lims.config import PROJECTNAME
-from bika.lims.content.bikaschema import BikaSchema
-from bika.lims.content.analysisrequest import schema as ar_schema
-from bika.lims.content.sample import schema as sample_schema
-from bika.lims.idserver import renameAfterCreation
-from bika.lims.interfaces import IARImport, IClient
-from bika.lims.utils import tmpID
-from bika.lims.utils.analysisrequest import create_analysisrequest
-from bika.lims.vocabularies import CatalogVocabulary
-from bika.lims.workflow import doActionFor
-from collective.progressbar.events import InitialiseProgressBar
-from collective.progressbar.events import ProgressBar
-from collective.progressbar.events import ProgressState
-from collective.progressbar.events import UpdateProgressEvent
-from Products.Archetypes import atapi
-from Products.Archetypes.public import *
-from plone.app.blob.field import FileField as BlobFileField
-from Products.Archetypes.references import HoldingReference
-from Products.Archetypes.utils import addStatusMessage
-from Products.CMFCore.utils import getToolByName
-from Products.CMFPlone.utils import _createObjectByType
-from Products.DataGridField import CheckboxColumn
-from Products.DataGridField import Column
-from Products.DataGridField import DataGridField
-from Products.DataGridField import DataGridWidget
-from Products.DataGridField import DateColumn
-from Products.DataGridField import LinesColumn
-from Products.DataGridField import SelectColumn
-from zope import event
-from zope.event import notify
-from zope.i18nmessageid import MessageFactory
-from zope.interface import implements
-
-from bika.lims.browser.widgets import ReferenceWidget as bReferenceWidget
-
-import sys
-import transaction
-
-_p = MessageFactory(u"plone")
-
-OriginalFile = BlobFileField(
- 'OriginalFile',
- widget=ComputedWidget(
- visible=False
- ),
-)
-
-Filename = StringField(
- 'Filename',
- widget=StringWidget(
- label=_('Original Filename'),
- visible=True
- ),
-)
-
-NrSamples = StringField(
- 'NrSamples',
- widget=StringWidget(
- label=_('Number of samples'),
- visible=True
- ),
-)
-
-ClientName = StringField(
- 'ClientName',
- searchable=True,
- widget=StringWidget(
- label=_("Client Name"),
- ),
-)
-
-ClientID = StringField(
- 'ClientID',
- searchable=True,
- widget=StringWidget(
- label=_('Client ID'),
- ),
-)
-
-ClientOrderNumber = StringField(
- 'ClientOrderNumber',
- searchable=True,
- widget=StringWidget(
- label=_('Client Order Number'),
- ),
-)
-
-ClientReference = StringField(
- 'ClientReference',
- searchable=True,
- widget=StringWidget(
- label=_('Client Reference'),
- ),
-)
-
-Contact = ReferenceField(
- 'Contact',
- allowed_types=('Contact',),
- relationship='ARImportContact',
- default_method='getContactUIDForUser',
- referenceClass=HoldingReference,
- vocabulary_display_path_bound=sys.maxint,
- widget=ReferenceWidget(
- label=_('Primary Contact'),
- size=20,
- visible=True,
- base_query={'is_active': True},
- showOn=True,
- popup_width='300px',
- colModel=[{'columnName': 'UID', 'hidden': True},
- {'columnName': 'Fullname', 'width': '100',
- 'label': _('Name')}],
- ),
-)
-
-Batch = ReferenceField(
- 'Batch',
- allowed_types=('Batch',),
- relationship='ARImportBatch',
- widget=bReferenceWidget(
- label=_('Batch'),
- visible=True,
- catalog_name='bika_catalog',
- base_query={'review_state': 'open'},
- showOn=True,
- ),
-)
-
-CCContacts = DataGridField(
- 'CCContacts',
- allow_insert=False,
- allow_delete=False,
- allow_reorder=False,
- allow_empty_rows=False,
- columns=('CCNamesReport',
- 'CCEmailsReport',
- 'CCNamesInvoice',
- 'CCEmailsInvoice'),
- default=[{'CCNamesReport': [],
- 'CCEmailsReport': [],
- 'CCNamesInvoice': [],
- 'CCEmailsInvoice': []
- }],
- widget=DataGridWidget(
- columns={
- 'CCNamesReport': LinesColumn('Report CC Contacts'),
- 'CCEmailsReport': LinesColumn('Report CC Emails'),
- 'CCNamesInvoice': LinesColumn('Invoice CC Contacts'),
- 'CCEmailsInvoice': LinesColumn('Invoice CC Emails')
- }
- )
-)
-
-SampleData = DataGridField(
- 'SampleData',
- allow_insert=True,
- allow_delete=True,
- allow_reorder=False,
- allow_empty_rows=False,
- allow_oddeven=True,
- columns=('ClientSampleID',
- 'SamplingDate',
- 'DateSampled',
- 'SamplePoint',
- 'SampleMatrix',
- 'SampleType', # not a schema field!
- 'ContainerType', # not a schema field!
- 'Analyses', # not a schema field!
- 'Profiles' # not a schema field!
- ),
- widget=DataGridWidget(
- label=_('Samples'),
- columns={
- 'ClientSampleID': Column('Sample ID'),
- 'SamplingDate': DateColumn('Sampling Date'),
- 'DateSampled': DateColumn('Date Sampled'),
- 'SamplePoint': SelectColumn(
- 'Sample Point', vocabulary='Vocabulary_SamplePoint'),
- 'SampleMatrix': SelectColumn(
- 'Sample Matrix', vocabulary='Vocabulary_SampleMatrix'),
- 'SampleType': SelectColumn(
- 'Sample Type', vocabulary='Vocabulary_SampleType'),
- 'ContainerType': SelectColumn(
- 'Container', vocabulary='Vocabulary_ContainerType'),
- 'Analyses': LinesColumn('Analyses'),
- 'Profiles': LinesColumn('Profiles'),
- }
- )
-)
-
-Errors = LinesField(
- 'Errors',
- widget=LinesWidget(
- label=_('Errors'),
- rows=10,
- )
-)
-
-schema = BikaSchema.copy() + Schema((
- OriginalFile,
- Filename,
- NrSamples,
- ClientName,
- ClientID,
- ClientOrderNumber,
- ClientReference,
- Contact,
- CCContacts,
- Batch,
- SampleData,
- Errors,
-))
-
-schema['title'].validators = ()
-# Update the validation layer after changing the validator at runtime
-schema['title']._validationLayer()
-
-
-class ARImport(BaseFolder):
- security = ClassSecurityInfo()
- schema = schema
- displayContentsTab = False
- implements(IARImport)
-
- _at_rename_after_creation = True
-
- def _renameAfterCreation(self, check_auto_id=False):
- renameAfterCreation(self)
-
- def guard_validate_transition(self):
- """We may only attempt validation if file data has been uploaded.
- """
- data = self.getOriginalFile()
- if data and len(data):
- return True
-
- # TODO Workflow - ARImport - Remove
- def workflow_before_validate(self):
- """This function transposes values from the provided file into the
- ARImport object's fields, and checks for invalid values.
-
- If errors are found:
- - Validation transition is aborted.
- - Errors are stored on object and displayed to user.
-
- """
- # Re-set the errors on this ARImport each time validation is attempted.
- # When errors are detected they are immediately appended to this field.
- self.setErrors([])
-
- self.validate_headers()
- self.validate_samples()
-
- if self.getErrors():
- addStatusMessage(self.REQUEST, _p('Validation errors.'), 'error')
- transaction.commit()
- self.REQUEST.response.write(
- '<script>document.location.href="%s/edit"</script>' % (
- self.absolute_url()))
- self.REQUEST.response.write(
- '<script>document.location.href="%s/view"</script>' % (
- self.absolute_url()))
-
- def at_post_edit_script(self):
- workflow = getToolByName(self, 'portal_workflow')
- trans_ids = [t['id'] for t in workflow.getTransitionsFor(self)]
- if 'validate' in trans_ids:
- workflow.doActionFor(self, 'validate')
-
- def workflow_script_import(self):
- """Create objects from valid ARImport
- """
- bsc = getToolByName(self, 'bika_setup_catalog')
- client = self.aq_parent
-
- title = _('Submitting Sample Import')
- description = _('Creating and initialising objects')
- bar = ProgressBar(self, self.REQUEST, title, description)
- notify(InitialiseProgressBar(bar))
-
- profiles = [x.getObject() for x in bsc(portal_type='AnalysisProfile')]
-
- gridrows = self.schema['SampleData'].get(self)
- row_cnt = 0
- for therow in gridrows:
- row = deepcopy(therow)
- row_cnt += 1
-
- # Profiles are titles, profile keys, or UIDS: convert them to UIDs.
- newprofiles = []
- for title in row['Profiles']:
- objects = [x for x in profiles
- if title in (x.getProfileKey(), x.UID(), x.Title())]
- for obj in objects:
- newprofiles.append(obj.UID())
- row['Profiles'] = newprofiles
-
- # Same for analyses
- newanalyses = set(self.get_row_services(row) +
- self.get_row_profile_services(row))
- # get batch
- batch = self.schema['Batch'].get(self)
- if batch:
- row['Batch'] = batch.UID()
- # Add AR fields from schema into this row's data
- row['ClientReference'] = self.getClientReference()
- row['ClientOrderNumber'] = self.getClientOrderNumber()
- contact_uid =\
- self.getContact().UID() if self.getContact() else None
- row['Contact'] = contact_uid
- # Creating analysis request from gathered data
- ar = create_analysisrequest(
- client,
- self.REQUEST,
- row,
- analyses=list(newanalyses),)
-
- # progress marker update
- progress_index = float(row_cnt) / len(gridrows) * 100
- progress = ProgressState(self.REQUEST, progress_index)
- notify(UpdateProgressEvent(progress))
-
- # document has been written to, and redirect() fails here
- self.REQUEST.response.write(
- '<script>document.location.href="%s"</script>' % (
- self.absolute_url()))
-
- def get_header_values(self):
- """Scrape the "Header" values from the original input file
- """
- lines = self.getOriginalFile().data.splitlines()
- reader = csv.reader(lines)
- header_fields = header_data = []
- for row in reader:
- if not any(row):
- continue
- if row[0].strip().lower() == 'header':
- header_fields = [x.strip() for x in row][1:]
- continue
- if row[0].strip().lower() == 'header data':
- header_data = [x.strip() for x in row][1:]
- break
- if not (header_data or header_fields):
- return None
- if not (header_data and header_fields):
- self.error("File is missing header row or header data")
- return None
- # inject us out of here
- values = dict(zip(header_fields, header_data))
- # blank cell from sheet will probably make it in here:
- if '' in values:
- del (values[''])
- return values
-
- def save_header_data(self):
- """Save values from the file's header row into their schema fields.
- """
- client = self.aq_parent
-
- headers = self.get_header_values()
- if not headers:
- return False
-
- # Plain header fields that can be set into plain schema fields:
- for h, f in [
- ('File name', 'Filename'),
- ('No of Samples', 'NrSamples'),
- ('Client name', 'ClientName'),
- ('Client ID', 'ClientID'),
- ('Client Order Number', 'ClientOrderNumber'),
- ('Client Reference', 'ClientReference')
- ]:
- v = headers.get(h, None)
- if v:
- field = self.schema[f]
- field.set(self, v)
- del (headers[h])
-
- # Primary Contact
- v = headers.get('Contact', None)
- contacts = [x for x in client.objectValues('Contact')]
- contact = [c for c in contacts if c.Title() == v]
- if contact:
- self.schema['Contact'].set(self, contact)
- else:
- self.error("Specified contact '%s' does not exist; using '%s'"%
- (v, contacts[0].Title()))
- self.schema['Contact'].set(self, contacts[0])
- del (headers['Contact'])
-
- # CCContacts
- field_value = {
- 'CCNamesReport': '',
- 'CCEmailsReport': '',
- 'CCNamesInvoice': '',
- 'CCEmailsInvoice': ''
- }
- for h, f in [
- # csv header name DataGrid Column ID
- ('CC Names - Report', 'CCNamesReport'),
- ('CC Emails - Report', 'CCEmailsReport'),
- ('CC Names - Invoice', 'CCNamesInvoice'),
- ('CC Emails - Invoice', 'CCEmailsInvoice'),
- ]:
- if h in headers:
- values = [x.strip() for x in headers.get(h, '').split(",")]
- field_value[f] = values if values else ''
- del (headers[h])
- self.schema['CCContacts'].set(self, [field_value])
-
- if headers:
- unexpected = ','.join(headers.keys())
- self.error("Unexpected header fields: %s" % unexpected)
-
- def get_sample_values(self):
- """Read the rows specifying Samples and return a dictionary with
- related data.
-
- keys are:
- headers - row with "Samples" in column 0. These headers are
- used as dictionary keys in the rows below.
- prices - Row with "Analysis Price" in column 0.
- total_analyses - Row with "Total analyses" in column 0
- price_totals - Row with "Total price excl Tax" in column 0
- samples - All other sample rows.
-
- """
- res = {'samples': []}
- lines = self.getOriginalFile().data.splitlines()
- reader = csv.reader(lines)
- next_rows_are_sample_rows = False
- for row in reader:
- if not any(row):
- continue
- if next_rows_are_sample_rows:
- vals = [x.strip() for x in row]
- if not any(vals):
- continue
- res['samples'].append(zip(res['headers'], vals))
- elif row[0].strip().lower() == 'samples':
- res['headers'] = [x.strip() for x in row]
- elif row[0].strip().lower() == 'analysis price':
- res['prices'] = \
- zip(res['headers'], [x.strip() for x in row])
- elif row[0].strip().lower() == 'total analyses':
- res['total_analyses'] = \
- zip(res['headers'], [x.strip() for x in row])
- elif row[0].strip().lower() == 'total price excl tax':
- res['price_totals'] = \
- zip(res['headers'], [x.strip() for x in row])
- next_rows_are_sample_rows = True
- return res
-
- def save_sample_data(self):
- """Save values from the file's header row into the DataGrid columns
- after doing some very basic validation
- """
- bsc = getToolByName(self, 'bika_setup_catalog')
- keywords = self.bika_setup_catalog.uniqueValuesFor('getKeyword')
- profiles = []
- for p in bsc(portal_type='AnalysisProfile'):
- p = p.getObject()
- profiles.append(p.Title())
- profiles.append(p.getProfileKey())
-
- sample_data = self.get_sample_values()
- if not sample_data:
- return False
-
- # columns that we expect, but do not find, are listed here.
- # we report on them only once, after looping through sample rows.
- missing = set()
-
- # This contains all sample header rows that were not handled
- # by this code
- unexpected = set()
-
- # Save other errors here instead of sticking them directly into
- # the field, so that they show up after MISSING and before UNEXPECTED
- errors = []
-
- # This will be the new sample-data field value, when we are done.
- grid_rows = []
-
- row_nr = 0
- for row in sample_data['samples']:
- row = dict(row)
- row_nr += 1
-
- # sid is just for referring the user back to row X in their
- # input spreadsheet
- gridrow = {'sid': row['Samples']}
- del (row['Samples'])
-
- # We'll use this later to verify the number against selections
- if 'Total number of Analyses or Profiles' in row:
- nr_an = row['Total number of Analyses or Profiles']
- del (row['Total number of Analyses or Profiles'])
- else:
- nr_an = 0
- try:
- nr_an = int(nr_an)
- except ValueError:
- nr_an = 0
-
- # TODO this is ignored and is probably meant to serve some purpose.
- del (row['Price excl Tax'])
-
- # ContainerType - not part of sample or AR schema
- if 'ContainerType' in row:
- title = row['ContainerType']
- if title:
- obj = self.lookup(('ContainerType',),
- Title=row['ContainerType'])
- if obj:
- gridrow['ContainerType'] = obj[0].UID
- del (row['ContainerType'])
-
- if 'SampleMatrix' in row:
- # SampleMatrix - not part of sample or AR schema
- title = row['SampleMatrix']
- if title:
- obj = self.lookup(('SampleMatrix',),
- Title=row['SampleMatrix'])
- if obj:
- gridrow['SampleMatrix'] = obj[0].UID
- del (row['SampleMatrix'])
-
- # match against sample schema
- for k, v in row.items():
- if k in ['Analyses', 'Profiles']:
- continue
- if k in sample_schema:
- del (row[k])
- if v:
- try:
- value = self.munge_field_value(
- sample_schema, row_nr, k, v)
- gridrow[k] = value
- except ValueError as e:
- errors.append(e.message)
-
- # match against ar schema
- for k, v in row.items():
- if k in ['Analyses', 'Profiles']:
- continue
- if k in ar_schema:
- del (row[k])
- if v:
- try:
- value = self.munge_field_value(
- ar_schema, row_nr, k, v)
- gridrow[k] = value
- except ValueError as e:
- errors.append(e.message)
-
- # Count and remove Keywords and Profiles from the list
- gridrow['Analyses'] = []
- for k, v in row.items():
- if k in keywords:
- del (row[k])
- if str(v).strip().lower() not in ('', '0', 'false'):
- gridrow['Analyses'].append(k)
- gridrow['Profiles'] = []
- for k, v in row.items():
- if k in profiles:
- del (row[k])
- if str(v).strip().lower() not in ('', '0', 'false'):
- gridrow['Profiles'].append(k)
- if len(gridrow['Analyses']) + len(gridrow['Profiles']) != nr_an:
- errors.append(
- "Row %s: Number of analyses does not match provided value" %
- row_nr)
-
- grid_rows.append(gridrow)
-
- self.setSampleData(grid_rows)
-
- if missing:
- self.error("SAMPLES: Missing expected fields: %s" %
- ','.join(missing))
-
- for thing in errors:
- self.error(thing)
-
- if unexpected:
- self.error("Unexpected header fields: %s" %
- ','.join(unexpected))
-
- def get_batch_header_values(self):
- """Scrape the "Batch Header" values from the original input file
- """
- lines = self.getOriginalFile().data.splitlines()
- reader = csv.reader(lines)
- batch_headers = batch_data = []
- for row in reader:
- if not any(row):
- continue
- if row[0].strip().lower() == 'batch header':
- batch_headers = [x.strip() for x in row][1:]
- continue
- if row[0].strip().lower() == 'batch data':
- batch_data = [x.strip() for x in row][1:]
- break
- if not (batch_data or batch_headers):
- return None
- if not (batch_data and batch_headers):
- self.error("Missing batch headers or data")
- return None
- # Inject us out of here
- values = dict(zip(batch_headers, batch_data))
- return values
-
- def create_or_reference_batch(self):
- """Save reference to batch, if existing batch specified
- Create new batch, if possible with specified values
- """
- client = self.aq_parent
- batch_headers = self.get_batch_header_values()
- if not batch_headers:
- return False
- # if the Batch's Title is specified and exists, no further
- # action is required. We will just set the Batch field to
- # use the existing object.
- batch_title = batch_headers.get('title', False)
- if batch_title:
- existing_batch = [x for x in client.objectValues('Batch')
- if x.title == batch_title]
- if existing_batch:
- self.setBatch(existing_batch[0])
- return existing_batch[0]
- # If the batch title is specified but does not exist,
- # we will attempt to create the batch now.
- if 'title' in batch_headers:
- if 'id' in batch_headers:
- del (batch_headers['id'])
- if '' in batch_headers:
- del (batch_headers[''])
- batch = _createObjectByType('Batch', client, tmpID())
- batch.processForm()
- batch.edit(**batch_headers)
- self.setBatch(batch)
-
- def munge_field_value(self, schema, row_nr, fieldname, value):
- """Convert a spreadsheet value into a field value that fits in
- the corresponding schema field.
- - boolean: All values are true except '', 'false', or '0'.
- - reference: The title of an object in field.allowed_types;
- returns a UID or list of UIDs
- - datetime: returns a string value from ulocalized_time
-
- Though this is only used during "Saving" of csv data into schema fields,
- it will flag 'validation' errors, as this is the only chance we will
- get to complain about these field values.
-
- """
- field = schema[fieldname]
- if field.type == 'boolean':
- value = str(value).strip().lower()
- value = '' if value in ['0', 'no', 'false', 'none'] else '1'
- return value
- if field.type == 'reference':
- value = str(value).strip()
- brains = self.lookup(field.allowed_types, Title=value)
- if not brains:
- brains = self.lookup(field.allowed_types, UID=value)
- if not brains:
- raise ValueError('Row %s: value is invalid (%s=%s)' % (
- row_nr, fieldname, value))
- if field.multiValued:
- return [b.UID for b in brains] if brains else []
- else:
- return brains[0].UID if brains else None
- if field.type == 'datetime':
- try:
- value = DateTime(value)
- return ulocalized_time(
- value, long_format=True, time_only=False, context=self)
- except:
- raise ValueError('Row %s: value is invalid (%s=%s)' % (
- row_nr, fieldname, value))
- return str(value)
-
- def validate_headers(self):
- """Validate headers fields from schema
- """
-
- pc = getToolByName(self, 'portal_catalog')
- pu = getToolByName(self, "plone_utils")
-
- client = self.aq_parent
-
- # Verify Client Name
- if self.getClientName() != client.Title():
- self.error("%s: value is invalid (%s)." % (
- 'Client name', self.getClientName()))
-
- # Verify Client ID
- if self.getClientID() != client.getClientID():
- self.error("%s: value is invalid (%s)." % (
- 'Client ID', self.getClientID()))
-
- existing_arimports = pc(portal_type='ARImport',
- review_state=['valid', 'imported'])
- # Verify Client Order Number
- for arimport in existing_arimports:
- if arimport.UID == self.UID() \
- or not arimport.getClientOrderNumber():
- continue
- arimport = arimport.getObject()
-
- if arimport.getClientOrderNumber() == self.getClientOrderNumber():
- self.error('%s: already used by existing ARImport.' %
- 'ClientOrderNumber')
- break
-
- # Verify Client Reference
- for arimport in existing_arimports:
- if arimport.UID == self.UID() \
- or not arimport.getClientReference():
- continue
- arimport = arimport.getObject()
- if arimport.getClientReference() == self.getClientReference():
- self.error('%s: already used by existing ARImport.' %
- 'ClientReference')
- break
-
- # getCCContacts has no value if object is not complete (eg during test)
- if self.getCCContacts():
- cc_contacts = self.getCCContacts()[0]
- contacts = [x for x in client.objectValues('Contact')]
- contact_names = [c.Title() for c in contacts]
- # validate Contact existence in this Client
- for k in ['CCNamesReport', 'CCNamesInvoice']:
- for val in cc_contacts[k]:
- if val and val not in contact_names:
- self.error('%s: value is invalid (%s)' % (k, val))
- else:
- cc_contacts = {'CCNamesReport': [],
- 'CCEmailsReport': [],
- 'CCNamesInvoice': [],
- 'CCEmailsInvoice': []
- }
- # validate Contact existence in this Client
- for k in ['CCEmailsReport', 'CCEmailsInvoice']:
- for val in cc_contacts.get(k, []):
- if val and not pu.validateSingleNormalizedEmailAddress(val):
- self.error('%s: value is invalid (%s)' % (k, val))
-
- def validate_samples(self):
- """Scan through the SampleData values and make sure
- that each one is correct
- """
-
- bsc = getToolByName(self, 'bika_setup_catalog')
- keywords = bsc.uniqueValuesFor('getKeyword')
- profiles = []
- for p in bsc(portal_type='AnalysisProfile'):
- p = p.getObject()
- profiles.append(p.Title())
- profiles.append(p.getProfileKey())
-
- row_nr = 0
- for gridrow in self.getSampleData():
- row_nr += 1
-
- # validate against sample and ar schemas
- for k, v in gridrow.items():
- if k in ['Analyses', 'Profiles']:
- continue
- if k in sample_schema:
- try:
- self.validate_against_schema(
- sample_schema, row_nr, k, v)
- continue
- except ValueError as e:
- self.error(e.message)
- break
- if k in ar_schema:
- try:
- self.validate_against_schema(
- ar_schema, row_nr, k, v)
- except ValueError as e:
- self.error(e.message)
-
- an_cnt = 0
- for v in gridrow['Analyses']:
- if v and v not in keywords:
- self.error("Row %s: value is invalid (%s=%s)" %
- (row_nr, 'Analysis keyword', v))
- else:
- an_cnt += 1
- for v in gridrow['Profiles']:
- if v and v not in profiles:
- self.error("Row %s: value is invalid (%s=%s)" %
- (row_nr, 'Profile Title', v))
- else:
- an_cnt += 1
- if not an_cnt:
- self.error("Row %s: No valid analyses or profiles" % row_nr)
-
- def validate_against_schema(self, schema, row_nr, fieldname, value):
- """
- """
- field = schema[fieldname]
- if field.type == 'boolean':
- value = str(value).strip().lower()
- return value
- if field.type == 'reference':
- value = str(value).strip()
- if field.required and not value:
- raise ValueError("Row %s: %s field requires a value" % (
- row_nr, fieldname))
- if not value:
- return value
- brains = self.lookup(field.allowed_types, UID=value)
- if not brains:
- raise ValueError("Row %s: value is invalid (%s=%s)" % (
- row_nr, fieldname, value))
- if field.multiValued:
- return [b.UID for b in brains] if brains else []
- else:
- return brains[0].UID if brains else None
- if field.type == 'datetime':
- try:
- ulocalized_time(DateTime(value), long_format=True,
- time_only=False, context=self)
- except:
- raise ValueError('Row %s: value is invalid (%s=%s)' % (
- row_nr, fieldname, value))
- return value
-
- def lookup(self, allowed_types, **kwargs):
- """Lookup an object of type (allowed_types). kwargs is sent
- directly to the catalog.
- """
- at = getToolByName(self, 'archetype_tool')
- for portal_type in allowed_types:
- catalog = at.catalog_map.get(portal_type, [None])[0]
- catalog = getToolByName(self, catalog)
- kwargs['portal_type'] = portal_type
- brains = catalog(**kwargs)
- if brains:
- return brains
-
- def get_row_services(self, row):
- """Return a list of services which are referenced in Analyses.
- values may be UID, Title or Keyword.
- """
- bsc = getToolByName(self, 'bika_setup_catalog')
- services = set()
- for val in row.get('Analyses', []):
- brains = bsc(portal_type='AnalysisService', getKeyword=val)
- if not brains:
- brains = bsc(portal_type='AnalysisService', title=val)
- if not brains:
- brains = bsc(portal_type='AnalysisService', UID=val)
- if brains:
- services.add(brains[0].UID)
- else:
- self.error("Invalid analysis specified: %s" % val)
- return list(services)
-
- def get_row_profile_services(self, row):
- """Return a list of services which are referenced in profiles
- values may be UID, Title or ProfileKey.
- """
- bsc = getToolByName(self, 'bika_setup_catalog')
- services = set()
- profiles = [x.getObject() for x in bsc(portal_type='AnalysisProfile')]
- for val in row.get('Profiles', []):
- objects = [x for x in profiles
- if val in (x.getProfileKey(), x.UID(), x.Title())]
- if objects:
- for service in objects[0].getService():
- services.add(service.UID())
- else:
- self.error("Invalid profile specified: %s" % val)
- return list(services)
-
- def get_row_container(self, row):
- """Return a sample container
- """
- bsc = getToolByName(self, 'bika_setup_catalog')
- val = row.get('Container', False)
- if val:
- brains = bsc(portal_type='Container', UID=row['Container'])
- if brains:
- return brains[0].getObject()
- brains = bsc(portal_type='ContainerType', UID=row['Container'])
- if brains:
- # XXX Cheating. The calculation of capacity vs. volume is not done.
- return brains[0].getObject()
- return None
-
- def get_row_profiles(self, row):
- bsc = getToolByName(self, 'bika_setup_catalog')
- profiles = []
- for profile_title in row.get('Profiles', []):
- profile = bsc(portal_type='AnalysisProfile', title=profile_title)
- profiles.append(profile)
- return profiles
-
- def Vocabulary_SamplePoint(self):
- vocabulary = CatalogVocabulary(self)
- vocabulary.catalog = 'bika_setup_catalog'
- folders = [self.bika_setup.bika_samplepoints]
- if IClient.providedBy(self.aq_parent):
- folders.append(self.aq_parent)
- return vocabulary(allow_blank=True, portal_type='SamplePoint')
-
- def Vocabulary_SampleMatrix(self):
- vocabulary = CatalogVocabulary(self)
- vocabulary.catalog = 'bika_setup_catalog'
- return vocabulary(allow_blank=True, portal_type='SampleMatrix')
-
- def Vocabulary_SampleType(self):
- vocabulary = CatalogVocabulary(self)
- vocabulary.catalog = 'bika_setup_catalog'
- folders = [self.bika_setup.bika_sampletypes]
- if IClient.providedBy(self.aq_parent):
- folders.append(self.aq_parent)
- return vocabulary(allow_blank=True, portal_type='SampleType')
-
- def Vocabulary_ContainerType(self):
- vocabulary = CatalogVocabulary(self)
- vocabulary.catalog = 'bika_setup_catalog'
- return vocabulary(allow_blank=True, portal_type='ContainerType')
-
- def error(self, msg):
- errors = list(self.getErrors())
- errors.append(msg)
- self.setErrors(errors)
-
-
-atapi.registerType(ARImport, PROJECTNAME)
diff --git a/bika/lims/content/bikacache.py b/bika/lims/content/bikacache.py
deleted file mode 100644
index 21114b8a6b..0000000000
--- a/bika/lims/content/bikacache.py
+++ /dev/null
@@ -1,51 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of SENAITE.CORE.
-#
-# SENAITE.CORE is free software: you can redistribute it and/or modify it under
-# the terms of the GNU General Public License as published by the Free Software
-# Foundation, version 2.
-#
-# This program is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
-# details.
-#
-# You should have received a copy of the GNU General Public License along with
-# this program; if not, write to the Free Software Foundation, Inc., 51
-# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-# Copyright 2018-2020 by it's authors.
-# Some rights reserved, see README and LICENSE.
-
-from zope.interface import implements
-from Products.Archetypes import atapi
-from Products.Archetypes.public import BaseContent
-from bika.lims.content.bikaschema import BikaSchema
-from bika.lims import bikaMessageFactory as _
-from bika.lims import config
-
-
-schema = BikaSchema.copy() + atapi.Schema((
- # 'Key' field is the name of the Cache object and must be unique
- atapi.StringField('Key',default=''),
-
- # 'Value' is the ID of the last created object. Must be increased before use.
- atapi.StringField('Value',default='')
-
-))
-
-schema['title'].widget.visible = False
-
-class BikaCache(BaseContent):
- """
- BikaCache objects store information about the 'Last Created ID' of different
- object types. For each object type there must be only one Cache object, which
- holds the ID of its last created object.
- It is used to avoid querying the whole catalog just to get the highest ID for
- any kind of object.
- """
- schema = schema
-
-# Activating the content type in Archetypes' internal types registry
-atapi.registerType(BikaCache, config.PROJECTNAME)
diff --git a/bika/lims/content/bikasetup.py b/bika/lims/content/bikasetup.py
index d5464f86d8..46ea7ca27b 100644
--- a/bika/lims/content/bikasetup.py
+++ b/bika/lims/content/bikasetup.py
@@ -46,7 +46,6 @@
from bika.lims.browser.widgets import DurationWidget
from bika.lims.browser.widgets import RecordsWidget
from bika.lims.browser.widgets import RejectionSetupWidget
-from bika.lims.config import ARIMPORT_OPTIONS
from bika.lims.config import ATTACHMENT_OPTIONS
from bika.lims.config import CURRENCIES
from bika.lims.config import WEEKDAYS
@@ -388,19 +387,6 @@ def getCounterTypes(self, instance=None):
format='select',
)
),
- LinesField(
- 'ARImportOption',
- schemata="Analyses",
- vocabulary=ARIMPORT_OPTIONS,
- widget=MultiSelectionWidget(
- visible=False,
- label=_("AR Import options"),
- description=_(
- "'Classic' indicates importing samples per sample and "
- "analysis service selection. With 'Profiles', analysis profile keywords "
- "are used to select multiple analysis services together"),
- )
- ),
StringField(
'ARAttachmentOption',
schemata="Analyses",
@@ -717,11 +703,6 @@ def getCounterTypes(self, instance=None):
schemata="ID Server",
default=[
{
- 'form': 'AI-{seq:03d}',
- 'portal_type': 'ARImport',
- 'sequence_type': 'generated',
- 'split_length': 1
- }, {
'form': 'B-{seq:03d}',
'portal_type': 'Batch',
'prefix': 'batch',
diff --git a/bika/lims/content/client.py b/bika/lims/content/client.py
index f6dc8f5853..030b3e46c4 100644
--- a/bika/lims/content/client.py
+++ b/bika/lims/content/client.py
@@ -39,7 +39,6 @@
from bika.lims.browser.fields import EmailsField
from bika.lims.browser.widgets import ReferenceWidget
from bika.lims.catalog.bikasetup_catalog import SETUP_CATALOG
-from bika.lims.config import ARIMPORT_OPTIONS
from bika.lims.config import DECIMAL_MARKS
from bika.lims.config import PROJECTNAME
from bika.lims.content.attachment import Attachment
@@ -197,11 +196,6 @@ def getContactUIDForUser(self):
if len(r) == 1:
return r[0].UID
- security.declarePublic("getARImportOptions")
-
- def getARImportOptions(self):
- return ARIMPORT_OPTIONS
-
def getContacts(self, only_active=True):
"""Return an array containing the contacts from this Client
"""
diff --git a/bika/lims/content/invoicebatch.py b/bika/lims/content/invoicebatch.py
deleted file mode 100644
index 1302ed4b6d..0000000000
--- a/bika/lims/content/invoicebatch.py
+++ /dev/null
@@ -1,34 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of SENAITE.CORE.
-#
-# SENAITE.CORE is free software: you can redistribute it and/or modify it under
-# the terms of the GNU General Public License as published by the Free Software
-# Foundation, version 2.
-#
-# This program is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
-# details.
-#
-# You should have received a copy of the GNU General Public License along with
-# this program; if not, write to the Free Software Foundation, Inc., 51
-# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-# Copyright 2018-2020 by it's authors.
-# Some rights reserved, see README and LICENSE.
-
-from bika.lims.config import PROJECTNAME
-from bika.lims.interfaces import IInvoiceBatch
-from Products.Archetypes.public import registerType
-from Products.Archetypes.public import BaseFolder
-from zope.interface import implements
-
-
-class InvoiceBatch(BaseFolder):
- """REMOVE AFTER 1.3
- """
- implements(IInvoiceBatch)
-
-
-registerType(InvoiceBatch, PROJECTNAME)
diff --git a/bika/lims/content/invoicefolder.py b/bika/lims/content/invoicefolder.py
deleted file mode 100644
index a24a33c925..0000000000
--- a/bika/lims/content/invoicefolder.py
+++ /dev/null
@@ -1,34 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of SENAITE.CORE.
-#
-# SENAITE.CORE is free software: you can redistribute it and/or modify it under
-# the terms of the GNU General Public License as published by the Free Software
-# Foundation, version 2.
-#
-# This program is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
-# details.
-#
-# You should have received a copy of the GNU General Public License along with
-# this program; if not, write to the Free Software Foundation, Inc., 51
-# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-# Copyright 2018-2020 by it's authors.
-# Some rights reserved, see README and LICENSE.
-
-from bika.lims.config import PROJECTNAME
-from bika.lims.interfaces import IInvoiceFolder
-from Products.Archetypes import atapi
-from Products.ATContentTypes.content import folder
-from zope.interface import implements
-
-
-class InvoiceFolder(folder.ATFolder):
- """REMOVE AFTER 1.3
- """
- implements(IInvoiceFolder)
-
-
-atapi.registerType(InvoiceFolder, PROJECTNAME)
diff --git a/bika/lims/content/sample.py b/bika/lims/content/sample.py
deleted file mode 100644
index 00cdc80010..0000000000
--- a/bika/lims/content/sample.py
+++ /dev/null
@@ -1,481 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of SENAITE.CORE.
-#
-# SENAITE.CORE is free software: you can redistribute it and/or modify it under
-# the terms of the GNU General Public License as published by the Free Software
-# Foundation, version 2.
-#
-# This program is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
-# details.
-#
-# You should have received a copy of the GNU General Public License along with
-# this program; if not, write to the Free Software Foundation, Inc., 51
-# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-# Copyright 2018-2020 by it's authors.
-# Some rights reserved, see README and LICENSE.
-
-"""Sample represents a physical sample submitted for testing
-"""
-
-from datetime import timedelta
-from AccessControl import ClassSecurityInfo
-from bika.lims import bikaMessageFactory as _
-from bika.lims.api import get_object_by_uid
-from bika.lims.browser.fields.remarksfield import RemarksField
-from bika.lims.browser.fields.uidreferencefield import get_backreferences
-from bika.lims.utils import t, getUsers
-from Products.ATExtensions.field import RecordsField
-from bika.lims.browser.widgets.datetimewidget import DateTimeWidget
-from bika.lims.browser.widgets import RejectionWidget
-from bika.lims.browser.widgets import RemarksWidget
-from bika.lims.config import PROJECTNAME
-from bika.lims.content.bikaschema import BikaSchema
-from bika.lims.interfaces import ISample
-from Products.Archetypes import atapi
-from Products.Archetypes.public import *
-from Products.Archetypes.references import HoldingReference
-from Products.ATContentTypes.lib.historyaware import HistoryAwareMixin
-from Products.ATContentTypes.utils import DT2dt, dt2DT
-from Products.CMFCore import permissions
-from Products.CMFPlone.utils import safe_unicode
-from zope.interface import implements
-
-from bika.lims.browser.fields import DateTimeField
-from bika.lims.browser.widgets import ReferenceWidget
-from bika.lims.browser.widgets import SelectionWidget as BikaSelectionWidget
-
-import sys
-from bika.lims.utils import to_unicode
-from bika.lims.interfaces import IDoNotSupportSnapshots
-
-schema = BikaSchema.copy() + Schema((
- # TODO This field is only for v1.3.0 migration purposes
- # bika_catalog contains an "isValid" index. We will take advantage of this
- # index to keep track of the Samples that have been migrated already in
- # order to prevent an unnecessary reimport when v1.3.0 is rerun.
- # This field is used by `isValid` function
- BooleanField('Migrated',
- default = False,
- ),
- StringField('SampleID',
- required=1,
- searchable=True,
- mode="rw",
- read_permission=permissions.View,
- write_permission=permissions.ModifyPortalContent,
- widget=StringWidget(
- label=_("Sample ID"),
- description=_("The ID assigned to the client's sample by the lab"),
- visible=False,
- render_own_label=True,
- ),
- ),
- StringField('ClientReference',
- mode="rw",
- read_permission=permissions.View,
- write_permission=permissions.ModifyPortalContent,
- widget=StringWidget(
- label=_("Client Reference"),
- visible=False,
- render_own_label=True,
- ),
- ),
- StringField('ClientSampleID',
- mode="rw",
- read_permission=permissions.View,
- write_permission=permissions.ModifyPortalContent,
- widget=StringWidget(
- label=_("Client SID"),
- visible=False,
- render_own_label=True,
- ),
- ),
- ReferenceField('SampleType',
- required=1,
- vocabulary_display_path_bound=sys.maxsize,
- allowed_types=('SampleType',),
- relationship='SampleSampleType',
- referenceClass=HoldingReference,
- mode="rw",
- read_permission=permissions.View,
- write_permission=permissions.ModifyPortalContent,
- widget=ReferenceWidget(
- label=_("Sample Type"),
- render_own_label=True,
- visible=False,
- catalog_name='bika_setup_catalog',
- base_query={'is_active': True},
- showOn=True,
- ),
- ),
- ComputedField('SampleTypeTitle',
- expression="here.getSampleType() and here.getSampleType().Title() or ''",
- widget=ComputedWidget(
- visible=False,
- ),
- ),
- ReferenceField('SamplePoint',
- vocabulary_display_path_bound=sys.maxsize,
- allowed_types=('SamplePoint',),
- relationship = 'SampleSamplePoint',
- referenceClass = HoldingReference,
- mode="rw",
- read_permission=permissions.View,
- write_permission=permissions.ModifyPortalContent,
- widget=ReferenceWidget(
- label=_("Sample Point"),
- render_own_label=True,
- visible=False,
- catalog_name='bika_setup_catalog',
- base_query={'is_active': True},
- showOn=True,
- ),
- ),
- ComputedField('SamplePointTitle',
- expression = "here.getSamplePoint() and here.getSamplePoint().Title() or ''",
- widget = ComputedWidget(
- visible=False,
- ),
- ),
- ReferenceField(
- 'StorageLocation',
- allowed_types='StorageLocation',
- relationship='AnalysisRequestStorageLocation',
- mode="rw",
- read_permission=permissions.View,
- write_permission=permissions.ModifyPortalContent,
- widget=ReferenceWidget(
- label=_("Storage Location"),
- description=_("Location where sample is kept"),
- size=20,
- render_own_label=True,
- visible=False,
- catalog_name='bika_setup_catalog',
- base_query={'is_active': True},
- showOn=True,
- ),
- ),
- BooleanField('SamplingWorkflowEnabled',
- default_method='getSamplingWorkflowEnabledDefault'
- ),
- DateTimeField('DateSampled',
- mode="rw",
- read_permission=permissions.View,
- widget = DateTimeWidget(
- label=_("Date Sampled"),
- show_time=True,
- size=20,
- visible=False,
- render_own_label=True,
- ),
- ),
- StringField('Sampler',
- mode="rw",
- read_permission=permissions.View,
- vocabulary='getSamplers',
- widget=BikaSelectionWidget(
- format='select',
- label=_("Sampler"),
- visible=False,
- render_own_label=True,
- ),
- ),
- StringField('ScheduledSamplingSampler',
- mode="rw",
- read_permission=permissions.View,
- vocabulary='getSamplers',
- widget=BikaSelectionWidget(
- description=_("Define the sampler supposed to do the sample in "
- "the scheduled date"),
- format='select',
- label=_("Sampler for scheduled sampling"),
- visible=False,
- render_own_label=True,
- ),
- ),
- DateTimeField('SamplingDate',
- mode="rw",
- read_permission=permissions.View,
- write_permission=permissions.ModifyPortalContent,
- widget = DateTimeWidget(
- label=_("Expected Sampling Date"),
- description=_("Define when the sampler has to take the samples"),
- show_time=True,
- visible=False,
- render_own_label=True,
- ),
- ),
- ReferenceField('SamplingDeviation',
- vocabulary_display_path_bound = sys.maxsize,
- allowed_types = ('SamplingDeviation',),
- relationship = 'SampleSamplingDeviation',
- referenceClass = HoldingReference,
- mode="rw",
- read_permission=permissions.View,
- write_permission=permissions.ModifyPortalContent,
- widget=ReferenceWidget(
- label=_("Sampling Deviation"),
- render_own_label=True,
- visible=False,
- catalog_name='bika_setup_catalog',
- base_query={'is_active': True},
- showOn=True,
- ),
- ),
- ReferenceField('SampleCondition',
- vocabulary_display_path_bound = sys.maxsize,
- allowed_types = ('SampleCondition',),
- relationship = 'SampleSampleCondition',
- referenceClass = HoldingReference,
- mode="rw",
- read_permission=permissions.View,
- write_permission=permissions.ModifyPortalContent,
- widget=ReferenceWidget(
- label=_("Sample Condition"),
- render_own_label=True,
- visible=False,
- catalog_name='bika_setup_catalog',
- base_query={'is_active': True},
- showOn=True,
- ),
- ),
- StringField(
- 'EnvironmentalConditions',
- mode="rw",
- read_permission=permissions.View,
- write_permission=permissions.ModifyPortalContent,
- widget=StringWidget(
- label=_("Environmental Conditions"),
- visible=False,
- render_own_label=True,
- size=20,
- ),
- ),
- # Another way to obtain a transition date is to use the getTransitionDate
- # function. We are using a DateTimeField/Widget here because in some
- # cases the user may want to change the Received Date.
- # AnalysisRequest and Sample's DateReceived fields need not have
- # the same value.
- # This field is updated in workflow_script_receive method.
- DateTimeField('DateReceived',
- mode="rw",
- read_permission=permissions.View,
- write_permission=permissions.ModifyPortalContent,
- widget = DateTimeWidget(
- label=_("Date Received"),
- show_time=True,
- datepicker_nofuture=1,
- visible=False,
- render_own_label=True,
- ),
- ),
- ComputedField('ClientUID',
- expression = 'context.aq_parent.UID()',
- widget = ComputedWidget(
- visible=False,
- ),
- ),
- ComputedField('SampleTypeUID',
- expression='context.getSampleType() and \
- context.getSampleType().UID() or None',
- widget=ComputedWidget(
- visible=False,
- ),
- ),
- ComputedField('SamplePointUID',
- expression = 'context.getSamplePoint() and context.getSamplePoint().UID() or None',
- widget = ComputedWidget(
- visible=False,
- ),
- ),
- BooleanField('Composite',
- default = False,
- mode="rw",
- read_permission=permissions.View,
- write_permission=permissions.ModifyPortalContent,
- widget = BooleanWidget(
- label=_("Composite"),
- visible=False,
- render_own_label=True,
- ),
- ),
- DateTimeField('DateExpired',
- mode="rw",
- read_permission=permissions.View,
- write_permission=permissions.ModifyPortalContent,
- widget = DateTimeWidget(
- label=_("Date Expired"),
- visible=False,
- render_own_label=True,
- ),
- ),
- ComputedField('DisposalDate',
- expression = 'context.disposal_date()',
- widget=DateTimeWidget(
- visible=False,
- render_own_label=True,
- ),
- ),
- DateTimeField('DateDisposed',
- mode="rw",
- read_permission=permissions.View,
- write_permission=permissions.ModifyPortalContent,
- widget = DateTimeWidget(
- label=_("Date Disposed"),
- visible=False,
- render_own_label=True,
- ),
- ),
- BooleanField('AdHoc',
- default=False,
- mode="rw",
- read_permission=permissions.View,
- write_permission=permissions.ModifyPortalContent,
- widget=BooleanWidget(
- label=_("Ad-Hoc"),
- visible=False,
- render_own_label=True,
- ),
- ),
- RemarksField(
- 'Remarks',
- widget=RemarksWidget(
- label=_("Remarks"),
- ),
- ),
- RecordsField(
- 'RejectionReasons',
- widget = RejectionWidget(
- label=_("Sample Rejection"),
- description = _("Set the Sample Rejection workflow and the reasons"),
- render_own_label=False,
- visible=False,
- ),
- ),
-))
-
-
-schema['title'].required = False
-
-
-class Sample(BaseFolder, HistoryAwareMixin):
- implements(ISample, IDoNotSupportSnapshots)
- security = ClassSecurityInfo()
- displayContentsTab = False
- schema = schema
-
- _at_rename_after_creation = True
-
- def _renameAfterCreation(self, check_auto_id=False):
- from bika.lims.idserver import renameAfterCreation
- renameAfterCreation(self)
-
- def _getCatalogTool(self):
- from bika.lims.catalog import getCatalog
- return getCatalog(self)
-
- def getSampleID(self):
- """ Return the Sample ID as title """
- return safe_unicode(self.getId()).encode('utf-8')
-
- def Title(self):
- """ Return the Sample ID as title """
- return self.getSampleID()
-
- def getSamplingWorkflowEnabledDefault(self):
- return self.bika_setup.getSamplingWorkflowEnabled()
-
- def getContactTitle(self):
- return ""
-
- def getClientTitle(self):
- proxies = self.getAnalysisRequests()
- if not proxies:
- return ""
- value = proxies[0].aq_parent.Title()
- return value
-
- def getProfilesTitle(self):
- return ""
-
- def getAnalysisService(self):
- analyses = []
- for ar in self.getAnalysisRequests():
- analyses += list(ar.getAnalyses(full_objects=True))
- value = []
- for analysis in analyses:
- val = analysis.Title()
- if val not in value:
- value.append(val)
- return value
-
- def getAnalysts(self):
- analyses = []
- for ar in self.getAnalysisRequests():
- analyses += list(ar.getAnalyses(full_objects=True))
- value = []
- for analysis in analyses:
- val = analysis.getAnalyst()
- if val not in value:
- value.append(val)
- return value
-
- security.declarePublic('getAnalysisRequests')
-
- def getAnalysisRequests(self):
- backrefs = get_backreferences(self, 'AnalysisRequestSample')
- ars = map(get_object_by_uid, backrefs)
- return ars
-
- security.declarePublic('getAnalyses')
-
- def getAnalyses(self, contentFilter=None, **kwargs):
- """ return list of all analyses against this sample
- """
- # contentFilter and kwargs are combined. They both exist for
- # compatibility between the two signatures; kwargs has been added
- # to be compatible with how getAnalyses() is used everywhere else.
- cf = contentFilter if contentFilter else {}
- cf.update(kwargs)
- analyses = []
- for ar in self.getAnalysisRequests():
- analyses.extend(ar.getAnalyses(**cf))
- return analyses
-
- def getSamplers(self):
- return getUsers(self, ['Sampler', ])
-
- def disposal_date(self):
- """Returns the date the retention period ends for this sample based on
- the retention period from the Sample Type. If the sample hasn't been
- collected yet, returns None
- """
- date_sampled = self.getDateSampled()
- if not date_sampled:
- return None
-
- # TODO Preservation - preservation's retention period has priority over
- # sample type's preservation period
-
- retention_period = self.getSampleType().getRetentionPeriod() or {}
- retention_period_delta = timedelta(
- days=int(retention_period.get("days", 0)),
- hours=int(retention_period.get("hours", 0)),
- minutes=int(retention_period.get("minutes", 0))
- )
- return dt2DT(DT2dt(date_sampled) + retention_period_delta)
-
-
- # TODO This method is only for v1.3.0 migration purposes
- # bika_catalog contains an "isValid" index. We will take advantage of this
- # index to keep track of the Samples that have been migrated already in
- # order to prevent an unnecessary reimport when v1.3.0 is rerun.
- def isValid(self):
- return self.getMigrated()
-
-
-atapi.registerType(Sample, PROJECTNAME)
diff --git a/bika/lims/content/samplepartition.py b/bika/lims/content/samplepartition.py
deleted file mode 100644
index 3df9a60d18..0000000000
--- a/bika/lims/content/samplepartition.py
+++ /dev/null
@@ -1,91 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of SENAITE.CORE.
-#
-# SENAITE.CORE is free software: you can redistribute it and/or modify it under
-# the terms of the GNU General Public License as published by the Free Software
-# Foundation, version 2.
-#
-# This program is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
-# details.
-#
-# You should have received a copy of the GNU General Public License along with
-# this program; if not, write to the Free Software Foundation, Inc., 51
-# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-# Copyright 2018-2020 by it's authors.
-# Some rights reserved, see README and LICENSE.
-
-
-from AccessControl import ClassSecurityInfo
-from Products.ATContentTypes.lib.historyaware import HistoryAwareMixin
-from Products.Archetypes.public import BaseContent
-from Products.Archetypes.public import BooleanField
-from Products.Archetypes.public import DateTimeField
-from Products.Archetypes.public import ReferenceField
-from Products.Archetypes.public import Schema
-from Products.Archetypes.public import StringField
-from Products.Archetypes.public import registerType
-from Products.CMFPlone.utils import safe_unicode
-from bika.lims.browser.fields import DurationField
-from bika.lims.browser.fields import UIDReferenceField
-from bika.lims.config import PROJECTNAME
-from bika.lims.content.bikaschema import BikaSchema
-from bika.lims.interfaces import ISamplePartition
-from zope.interface import implements
-from bika.lims.interfaces import IDoNotSupportSnapshots
-
-schema = BikaSchema.copy() + Schema((
- ReferenceField('Container',
- allowed_types=('Container',),
- relationship='SamplePartitionContainer',
- required=1,
- multiValued=0,
- ),
- ReferenceField('Preservation',
- allowed_types=('Preservation',),
- relationship='SamplePartitionPreservation',
- required=0,
- multiValued=0,
- ),
- BooleanField('Separate',
- default=False
- ),
- UIDReferenceField('Analyses',
- allowed_types=('Analysis',),
- required=0,
- multiValued=1,
- ),
- DateTimeField('DatePreserved',
- ),
- StringField('Preserver',
- searchable=True
- ),
- DurationField('RetentionPeriod',
- ),
-)
-)
-
-schema['title'].required = False
-
-
-class SamplePartition(BaseContent, HistoryAwareMixin):
- implements(ISamplePartition, IDoNotSupportSnapshots)
- security = ClassSecurityInfo()
- displayContentsTab = False
- schema = schema
-
- _at_rename_after_creation = True
-
- def _renameAfterCreation(self, check_auto_id=False):
- from bika.lims.idserver import renameAfterCreation
- renameAfterCreation(self)
-
- def Title(self):
- """ Return the Sample ID as title """
- return safe_unicode(self.getId()).encode('utf-8')
-
-
-registerType(SamplePartition, PROJECTNAME)
diff --git a/bika/lims/content/samplesfolder.py b/bika/lims/content/samplesfolder.py
deleted file mode 100644
index f134f9e49f..0000000000
--- a/bika/lims/content/samplesfolder.py
+++ /dev/null
@@ -1,46 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of SENAITE.CORE.
-#
-# SENAITE.CORE is free software: you can redistribute it and/or modify it under
-# the terms of the GNU General Public License as published by the Free Software
-# Foundation, version 2.
-#
-# This program is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
-# details.
-#
-# You should have received a copy of the GNU General Public License along with
-# this program; if not, write to the Free Software Foundation, Inc., 51
-# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-# Copyright 2018-2020 by it's authors.
-# Some rights reserved, see README and LICENSE.
-
-"""SamplesFolder is a fake folder to live in the nav bar. It has
-view from browser/sample.py/SamplesView wired to it.
-"""
-from Products.ATContentTypes.content import schemata
-from Products.Archetypes import atapi
-from Products.CMFCore import permissions
-from Products.CMFCore.utils import getToolByName
-from bika.lims.config import PROJECTNAME
-from AccessControl import ClassSecurityInfo
-from bika.lims.interfaces import ISamplesFolder, IHaveNoBreadCrumbs
-from plone.app.folder import folder
-from zope.interface import implements
-from bika.lims import bikaMessageFactory as _
-from bika.lims.utils import t
-
-schema = folder.ATFolderSchema.copy()
-
-class SamplesFolder(folder.ATFolder):
- implements(ISamplesFolder, IHaveNoBreadCrumbs)
- displayContentsTab = False
- schema = schema
- security = ClassSecurityInfo()
-
-schemata.finalizeATCTSchema(schema, folderish = True, moveDiscussion = False)
-
-atapi.registerType(SamplesFolder, PROJECTNAME)
diff --git a/bika/lims/interfaces/__init__.py b/bika/lims/interfaces/__init__.py
index 78cb9e5ad0..fde2a5415c 100644
--- a/bika/lims/interfaces/__init__.py
+++ b/bika/lims/interfaces/__init__.py
@@ -194,11 +194,6 @@ class IReportFolder(Interface):
"""Report folder
"""
-# TODO Remove in >v1.3.0
-class ISample(Interface):
- """Sample
- """
-
class ISampleCondition(Interface):
"""Sample Condition
@@ -220,16 +215,6 @@ class ISampleMatrices(Interface):
"""
-class ISamplePartition(Interface):
- """Sample
- """
-
-
-class ISamplesFolder(Interface):
- """Samples Folder
- """
-
-
class ISamplingDeviation(Interface):
"""Sampling Deviation
"""
@@ -270,16 +255,6 @@ class IInvoice(Interface):
"""
-class IInvoiceBatch(Interface):
- """Invoice Batch
- """
-
-
-class IInvoiceFolder(Interface):
- """Invoices Folder
- """
-
-
class IBikaSetup(Interface):
"""Marker interface for the LIMS Setup
"""
@@ -745,16 +720,6 @@ class ISetupDataImporter(Interface):
"""
-class IARImportFolder(Interface):
- """Marker interface for a folder that contains ARImports
- """
-
-
-class IARImport(Interface):
- """Marker interface for an ARImport
- """
-
-
class IPricelist(Interface):
"""Folder view marker for Pricelist
"""
diff --git a/bika/lims/monkey/contentmenu.py b/bika/lims/monkey/contentmenu.py
index fadfc532c2..f830799c90 100644
--- a/bika/lims/monkey/contentmenu.py
+++ b/bika/lims/monkey/contentmenu.py
@@ -23,7 +23,6 @@ def contentmenu_factories_available(self):
"""
if hasattr(self._addContext(), 'portal_type') \
and self._addContext().portal_type in [
- 'ARImport',
'Batch',
'Client',
'AnalysisRequest',
diff --git a/bika/lims/permissions.py b/bika/lims/permissions.py
index 9300c65cfd..73338f2439 100644
--- a/bika/lims/permissions.py
+++ b/bika/lims/permissions.py
@@ -181,7 +181,6 @@
EditWorksheet = "senaite.core: Edit Worksheet"
ManageBika = "senaite.core: Manage Bika"
ManageAnalysisRequests = "senaite.core: Manage Analysis Requests"
-ManageARImport = "senaite.core: Manage ARImport"
ManageInvoices = "senaite.core: Manage Invoices"
ManageLoginDetails = "senaite.core: Manage Login Details"
ManageReference = "senaite.core: Manage Reference"
diff --git a/bika/lims/permissions.zcml b/bika/lims/permissions.zcml
index fd54bde24f..d74e03db6b 100644
--- a/bika/lims/permissions.zcml
+++ b/bika/lims/permissions.zcml
@@ -152,7 +152,6 @@
-
diff --git a/bika/lims/profiles/default/factorytool.xml b/bika/lims/profiles/default/factorytool.xml
index fe96653ce9..83545eae20 100644
--- a/bika/lims/profiles/default/factorytool.xml
+++ b/bika/lims/profiles/default/factorytool.xml
@@ -1,7 +1,6 @@