NMRL-300 Prevent the creation of multiple attachment objects on results import #103

Merged (10 commits) on May 30, 2017
10 changes: 6 additions & 4 deletions bika/lims/browser/analyses.py
@@ -29,6 +29,7 @@
from Products.ZCatalog.interfaces import ICatalogBrain
from Products.Five.browser.pagetemplatefile import ViewPageTemplateFile
from bika.lims.utils.analysis import format_numeric_result
from plone.api.portal import get_tool
from zope.interface import implements
from zope.interface import Interface
from zope.component import getAdapters
@@ -632,10 +633,11 @@ def folderitem(self, obj, item, index):
# If the analysis service has the option 'attachment' enabled
if can_add_attachment or can_view_result:
attachments = ""
if obj.hasAttachment:
# TODO-performance: This is vey time consuming
full_obj = full_obj if full_obj else obj.getObject()
for attachment in full_obj.getAttachment():
if obj.getAttachmentUIDs:
at_uids = obj.getAttachmentUIDs
uc = get_tool('uid_catalog')
# Keep the resolved objects in their own variable; `attachments`
# accumulates the rendered HTML below.
attachment_objs = [x.getObject() for x in uc(UID=at_uids)]
for attachment in attachment_objs:
af = attachment.getAttachmentFile()
icon = af.icon
attachments +=\
2 changes: 1 addition & 1 deletion bika/lims/browser/analysisrequest/add.py
@@ -514,7 +514,7 @@ def __call__(self):


from bika.lims import deprecated
@deprecated(comment="bika.lims.browser.analysisrequest.add."
@deprecated(comment="[160525] bika.lims.browser.analysisrequest.add."
"create_analysisrequest is deprecated and will be removed "
"in Bika LIMS 3.3", replacement=crar)
def create_analysisrequest(context, request, values):
2 changes: 1 addition & 1 deletion bika/lims/catalog/analysis_catalog.py
@@ -47,6 +47,7 @@
# Defining the columns for this catalog
_columns_list = [
'worksheetanalysis_review_state',
'getAttachmentUIDs',
'getRequestID',
'getReferenceAnalysesGroupID',
'getResultCaptureDate',
@@ -83,7 +84,6 @@
'getInstrumentUID',
'getAnalyst',
'getAnalystName',
'hasAttachment',
'getNumberOfRequiredVerifications',
'getNumberOfVerifications',
'isSelfVerificationEnabled',
11 changes: 5 additions & 6 deletions bika/lims/content/analysis.py
@@ -1537,14 +1537,13 @@ def getServiceDefaultInstrumentURL(self):
return ins.absolute_url_path()
return ''

def hasAttachment(self):
"""
It is used as a metacolumn.
Checks if the object has attachments or not.
Returns a boolean.
def getAttachmentUIDs(self):
"""Used to populate metadata, so that we don't need full objects of
analyses when working with their attachments.
"""
attachments = self.getAttachment()
return len(attachments) > 0
uids = [att.UID() for att in attachments]
return uids

def guard_sample_transition(self):
workflow = getToolByName(self, "portal_workflow")
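
Taken together with the new 'getAttachmentUIDs' metadata column registered in analysis_catalog.py, this accessor lets callers work with attachments straight from catalog brains. A minimal sketch of the pattern, not part of this diff (the catalog query by UID and the helper name are assumptions):

from bika.lims.catalog import CATALOG_ANALYSIS_LISTING
from plone.api.portal import get_tool

def attachments_for(analysis_uid):
    # The brain already carries the getAttachmentUIDs metadata column,
    # so the analysis object itself never has to be woken up.
    brains = get_tool(CATALOG_ANALYSIS_LISTING)(UID=analysis_uid)
    at_uids = brains[0].getAttachmentUIDs if brains else []
    if not at_uids:
        return []
    # Only the much lighter Attachment objects are fetched.
    uc = get_tool('uid_catalog')
    return [b.getObject() for b in uc(UID=at_uids)]
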
4 changes: 2 additions & 2 deletions bika/lims/content/instrument.py
@@ -399,8 +399,8 @@ def getManufacturers(self):

from bika.lims import deprecated

@deprecated(comment="bika.lims.content.instrument.getMethodUID is \
deprecated and will be removed in Bika LIMS 3.3")
@deprecated(comment="[170214] bika.lims.content.instrument.getMethodUID "
"is deprecated and will be removed in Bika LIMS 3.3")
def getMethodUID(self):
# TODO Avoid using this function. Returns first method's UID for now.
if self.getMethods():
9 changes: 4 additions & 5 deletions bika/lims/content/labcontact.py
@@ -30,7 +30,7 @@
from bika.lims.interfaces import ILabContact
from bika.lims import logger
from bika.lims import bikaMessageFactory as _

from bika.lims import deprecated

schema = Person.schema.copy() + atapi.Schema((
atapi.LinesField('PublicationPreference',
@@ -123,10 +123,9 @@ def hasUser(self):
self.getUsername()) is not None

# TODO: Remove getDepartment
from bika.lims import deprecated
@deprecated(comment="bika.lims.contant.labcontact.getDepartment "
"is deprecated and will be removed "
"in Bika LIMS 3.3. Please, use getDepartments intead")
@deprecated(comment="[161222] bika.lims.contant.labcontact.getDepartment "
"is deprecated and will be removed in Bika LIMS 3.3. "
"Please, use getDepartments intead")
def getDepartment(self):
"""
This function is a mirror for getDepartments to maintain the
135 changes: 86 additions & 49 deletions bika/lims/exportimport/instruments/resultsimport.py
@@ -7,7 +7,7 @@

from Products.CMFCore.utils import getToolByName
from Products.CMFPlone.utils import _createObjectByType, safe_unicode
from bika.lims import bikaMessageFactory as _
from bika.lims import bikaMessageFactory as _, logger
from bika.lims.utils import t
from bika.lims.exportimport.instruments.logger import Logger
from bika.lims.idserver import renameAfterCreation
@@ -372,7 +372,12 @@ def process(self):
if len(acodes) == 0:
self.err("Service keywords: no matches found")

searchcriteria = self.getIdSearchCriteria();
# Attachments will be created in any worksheet that contains
# analyses that are updated by this import
attachments = {}
infile = self._parser.getInputFile()

searchcriteria = self.getIdSearchCriteria()
#self.log(_("Search criterias: %s") % (', '.join(searchcriteria)))
for objid, results in self._parser.getRawResults().iteritems():
# Allowed more than one result for the same sample and analysis.
@@ -461,6 +466,24 @@ def process(self):
continue

analysis = ans[0]

# Create attachment in worksheet linked to this analysis.
# Only if this import has not already created the attachment
# And only if the filename of the attachment is unique in
# this worksheet. Otherwise we will attempt to use existing
# attachment.
wss = analysis.getBackReferences('WorksheetAnalysis')
ws = wss[0] if wss else None
if ws:
if ws.getId() not in attachments:
fn = infile.filename
fn_attachments = self.get_attachment_filenames(ws)
if fn in fn_attachments:
attachments[ws.getId()] = fn_attachments[fn]
else:
attachments[ws.getId()] = \
self.create_attachment(ws, infile)

if capturedate:
values['DateTime'] = capturedate
processed = self._process_analysis(objid, analysis, values)
@@ -483,54 +506,14 @@ def process(self):
importedar.append(acode)
importedars[ar.getId()] = importedar

# Create the AttachmentType for mime type if not exists
attuid = None
attachmentType = self.bsc(portal_type="AttachmentType",
title=self._parser.getAttachmentFileType())
if len(attachmentType) == 0:
try:
folder = self.context.bika_setup.bika_attachmenttypes
obj = _createObjectByType("AttachmentType", folder, tmpID())
obj.edit(title=self._parser.getAttachmentFileType(),
description="Autogenerated file type")
obj.unmarkCreationFlag()
renameAfterCreation(obj)
attuid = obj.UID()
except:
attuid = None
self.err(
"Unable to create the Attachment Type ${mime_type}",
mapping={
"mime_type": self._parser.getFileMimeType()})
if ws:
self.attach_attachment(
analysis, attachments[ws.getId()])
else:
attuid = attachmentType[0].UID

if attuid is not None:
try:
# Attach the file to the Analysis
wss = analysis.getBackReferences('WorksheetAnalysis')
if wss and len(wss) > 0:
#TODO: See whether we can avoid using the WS and use the Analysis directly (useful for CalibrationTest)
ws = wss[0]
attachment = _createObjectByType("Attachment", ws, tmpID())
attachment.edit(
AttachmentFile=self._parser.getInputFile(),
AttachmentType=attuid,
AttachmentKeys='Results, Automatic import')
attachment.reindexObject()
others = analysis.getAttachment()
attachments = []
for other in others:
if other.getAttachmentFile().filename != attachment.getAttachmentFile().filename:
attachments.append(other.UID())
attachments.append(attachment.UID())
analysis.setAttachment(attachments)

except:
# self.err(_("Unable to attach results file '${file_name}' to AR ${request_id}",
# mapping={"file_name": self._parser.getInputFile().filename,
# "request_id": ar.getId()}))
pass
self.warn(
"Attachment cannot be linked to analysis as "
"it is not assigned to a worksheet (%s)" %
analysis)

for arid, acodes in importedars.iteritems():
acodesmsg = ["Analysis %s" % acod for acod in acodes]
@@ -558,6 +541,60 @@ def process(self):
mapping={"nr_updated_ars": str(len(importedars)),
"nr_updated_results": str(ancount)})

def create_mime_attachmenttype(self):
# Create the AttachmentType for the mime type if it does not exist yet
attachmentType = self.bsc(portal_type="AttachmentType",
title=self._parser.getAttachmentFileType())
if not attachmentType:
folder = self.context.bika_setup.bika_attachmenttypes
obj = _createObjectByType("AttachmentType", folder, tmpID())
obj.edit(title=self._parser.getAttachmentFileType(),
description="Autogenerated file type")
obj.unmarkCreationFlag()
renameAfterCreation(obj)
attuid = obj.UID()
else:
attuid = attachmentType[0].UID
return attuid

def create_attachment(self, ws, infile):
attuid = self.create_mime_attachmenttype()
attachment = None
if attuid:
attachment = _createObjectByType("Attachment", ws, tmpID())
logger.info("Creating %s in %s" % (attachment, ws))
fn = infile.filename
attachment.edit(
AttachmentFile=infile,
AttachmentType=attuid,
AttachmentKeys='Results, Automatic import')
attachment.setId(fn)
return attachment

def attach_attachment(self, analysis, attachment):
if attachment:
# Keep existing attachments whose filename differs from the shared
# worksheet attachment, then link the shared attachment itself.
an_atts = analysis.getAttachment()
attachments = [an_att.UID() for an_att in an_atts
if an_att.getAttachmentFile().filename !=
attachment.getAttachmentFile().filename]
attachments.append(attachment.UID())
logger.info("Attaching %s to %s" % (attachment.UID(), analysis))
analysis.setAttachment(attachments)
else:
self.warn("Attachment %s was not linked to analysis %s" %
(attachment, analysis))

def get_attachment_filenames(self, ws):
"""Return a mapping of filename -> Attachment for the attachments
already stored in this worksheet. The first attachment found for a
given filename wins, so pre-existing duplicates are ignored.
"""
fn_attachments = {}
for att in ws.objectValues('Attachment'):
fn = att.getAttachmentFile().filename
if fn not in fn_attachments:
fn_attachments[fn] = att
return fn_attachments

def _getObjects(self, objid, criteria, states):
#self.log("Criteria: %s %s") % (criteria, obji))
obj = []
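
For reference, the worksheet-level caching added to process() boils down to the flow below. This is an editor's sketch, not code from the diff; importer, infile and analyses_to_update are illustrative names standing in for self, the parser's input file and the analyses matched during the import:

attachments = {}  # worksheet id -> the single Attachment shared by this import
for analysis in analyses_to_update:
    wss = analysis.getBackReferences('WorksheetAnalysis')
    ws = wss[0] if wss else None
    if ws is None:
        # process() only warns in this case; there is nothing to attach to.
        continue
    ws_id = ws.getId()
    if ws_id not in attachments:
        existing = importer.get_attachment_filenames(ws)
        if infile.filename in existing:
            # Reuse an attachment with the same filename already on the WS
            attachments[ws_id] = existing[infile.filename]
        else:
            attachments[ws_id] = importer.create_attachment(ws, infile)
    importer.attach_attachment(analysis, attachments[ws_id])
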
69 changes: 64 additions & 5 deletions bika/lims/upgrade/v3_2_0_1705.py
@@ -7,14 +7,16 @@
from bika.lims import logger
from bika.lims.upgrade import upgradestep
from bika.lims.upgrade.utils import UpgradeUtils
from bika.lims.catalog import CATALOG_ANALYSIS_LISTING
from bika.lims.catalog import CATALOG_ANALYSIS_LISTING, \
CATALOG_WORKSHEET_LISTING
from bika.lims.catalog import CATALOG_ANALYSIS_REQUEST_LISTING
from bika.lims.config import VERSIONABLE_TYPES
from Products.CMFCore.utils import getToolByName
from bika.lims.upgrade.utils import migrate_to_blob
import traceback
import sys
import transaction
from plone.api.portal import get_tool

product = 'bika.lims'
version = '3.2.0.1705'
@@ -27,14 +29,19 @@ def upgrade(tool):
ufrom = ut.getInstalledVersion(product)
if ut.isOlderVersion(product, version):
logger.info("Skipping upgrade of {0}: {1} > {2}".format(
product, ufrom, version))
product, ufrom, version))
# The currently installed version is more recent than the target
# version of this upgradestep
return True

logger.info("Upgrading {0}: {1} -> {2}".format(product, ufrom, version))

# Remove duplicate attachments made by instrument imports
remove_attachment_duplicates(portal, pgthreshold=1000)

# Migrating atapi.FileField to blob.FileField
migareteFileFields(portal)

# Remove versionable types
logger.info("Removing versionable types...")
portal_repository = getToolByName(portal, 'portal_repository')
@@ -44,7 +51,7 @@
'SamplePoint',
'SampleType',
'StorageLocation',
'WorksheetTemplate',]
'WorksheetTemplate', ]
versionable = list(portal_repository.getVersionableContentTypes())
vers = [ver for ver in versionable if ver not in non_versionable]
portal_repository.setVersionableContentTypes(vers)
@@ -107,17 +114,69 @@ def migareteFileFields(portal):
"Method",
"Multifile",
"Report",
"ARReport",
"SamplePoint"]
for portal_type in portal_types:
logger.info(
"Starting migration of FileField fields from {}."
.format(portal_type))
.format(portal_type))
# Do the migration
migrate_to_blob(
portal,
portal_type=portal_type,
remove_old_value=True)
logger.info(
"Finished migration of FileField fields from {}."
.format(portal_type))
.format(portal_type))


def remove_attachment_duplicates(portal, pgthreshold=1000):
"""Visit every worksheet attachment, and remove duplicates.
The duplicates are filtered by filename, but that's okay because the
instrument import routine used filenames when it made them.
"""
pc = get_tool('portal_catalog')
wc = get_tool(CATALOG_WORKSHEET_LISTING)

# get all worksheets.
brains = wc(portal_type='Worksheet')
# list of lists.
dup_ans = [] # [fn, primary attachment, duplicate attachment, worksheet]
primaries = {} # key is wsID:fn. stores first found instance.
# for each worksheet, get all attachments.
dups_found = 0
for brain in brains:
ws = brain.getObject()
ws_id = ws.getId()
# for each attachment:
atts = ws.objectValues('Attachment')
for att in atts:
# Only process each fn once:
fn = att.getAttachmentFile().filename
key = "%s:%s" % (ws_id, fn)
if key not in primaries:
# not a dup. att is primary attachment for this key.
primaries[key] = att
continue
# we are a duplicate.
dup_ans.append([fn, primaries[key], att, ws])
dups_found += 1
logger.info("Keeping {} and removing {} attachments".format(
len(primaries), dups_found))

# Now unlink the duplicates from their analyses and delete them.
count = 0
for fn, att, dup, ws in dup_ans:
ans = dup.getBackReferences()
for an in ans:
an_atts = [a for a in an.getAttachment() if a.UID() != dup.UID()]
an.setAttachment(an_atts)
path_uid = '/'.join(dup.getPhysicalPath())
pc.uncatalog_object(path_uid)
#dup.getField('AttachmentFile').set(dup, 'DELETED')
dup.getField('AttachmentFile').unset(dup)
dup.aq_parent.manage_delObjects(dup.getId())
count += 1
if count % pgthreshold == 0:
logger.info("Removed {} of {} duplicate attachments...".format(
count, dups_found))
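
As a worked illustration of the wsID:fn grouping used above (hypothetical data, with plain strings standing in for worksheets and attachments):

primaries = {}
dup_ans = []
dups_found = 0
records = [("WS-001", "import.csv", "att-1"),
           ("WS-001", "import.csv", "att-2"),
           ("WS-002", "import.csv", "att-3")]
for ws_id, fn, att in records:
    key = "%s:%s" % (ws_id, fn)
    if key not in primaries:
        primaries[key] = att
        continue
    dup_ans.append([fn, primaries[key], att, ws_id])
    dups_found += 1

# primaries == {"WS-001:import.csv": "att-1", "WS-002:import.csv": "att-3"}
# dup_ans   == [["import.csv", "att-1", "att-2", "WS-001"]]
# dups_found == 1, so only att-2 is unlinked and deleted.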