Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

NMRL-309: publishing not working #117

Merged
merged 2 commits into from
Jun 8, 2017
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
38 changes: 19 additions & 19 deletions bika/lims/browser/analysisrequest/publish.py
Original file line number Diff line number Diff line change
Expand Up @@ -457,14 +457,14 @@ def publishFromHTML(self, aruid, results_html):
return [ar]

def publish(self):
"""Publish the AR report/s. Generates a results pdf file associated
to each AR, sends an email with the report to the lab manager and
sends a notification (usually an email with the PDF attached) to the
AR's contact and CCs. Transitions each published AR to statuses
'published', 'prepublished' or 'republished'. Returns a list with the
"""Publish the AR report/s. Generates a results pdf file associated
to each AR, sends an email with the report to the lab manager and
sends a notification (usually an email with the PDF attached) to the
AR's contact and CCs. Transitions each published AR to statuses
'published', 'prepublished' or 'republished'. Returns a list with the
AR identifiers that have been published/prepublished/republished
(only those 'verified', 'published' or at least have one 'verified'
result).
(only those 'verified', 'published' or at least have one 'verified'
result).
"""
if len(self._ars) > 1:
published_ars = []
Expand Down Expand Up @@ -670,27 +670,27 @@ def explode_data(self, data, padding=''):


class AnalysisRequestDigester:
"""Read AR data which could be useful during publication, into a data
dictionary. This class should be instantiated once, and the instance
called for all subsequent digestion. This allows the instance to cache
"""Read AR data which could be useful during publication, into a data
dictionary. This class should be instantiated once, and the instance
called for all subsequent digestion. This allows the instance to cache
data for objects that may be read multiple times for different ARs.

Passing overwrite=True when calling the instance will cause the
ar.Digest field to be overwritten with a new digestion. This flag
is set True by default in the EndRequestHandler that is responsible for
automated re-building.
It should be run once when the AR is verified (or when a verified AR is
modified) to pre-digest the data so that AnalysisRequestPublishView will

It should be run once when the AR is verified (or when a verified AR is
modified) to pre-digest the data so that AnalysisRequestPublishView will
run a little faster.

Note: ProxyFields are not included in the reading of the schema. If you
want to access sample fields in the report template, you must refer
directly to the correct field in the Sample data dictionary.

Note: ComputedFields are removed from the schema while creating the dict.
XXX: Add all metadata columns for the AR into the dict.

"""

def __init__(self):
Expand Down Expand Up @@ -718,7 +718,7 @@ def __call__(self, ar, overwrite=False):
return data

logger.info("=========== creating new data for %s" % ar)
import pdb;pdb.set_trace();pass

# Set data to the AR schema field, and return it.
data = self._ar_data(ar)
ar.setDigest(data)
Expand Down Expand Up @@ -841,8 +841,8 @@ def _format_address(self, address):
return "<div class='address'>%s</div>" % addr

def _workflow_data(self, instance):
"""Add some workflow information for all actions performed against
this instance. Only values for the last action event for any
"""Add some workflow information for all actions performed against
this instance. Only values for the last action event for any
transition will be set here, previous transitions will be ignored.

The default format for review_history is a list of lists; this function
Expand Down
4 changes: 2 additions & 2 deletions bika/lims/browser/js/bika.lims.loader.js
Original file line number Diff line number Diff line change
Expand Up @@ -77,7 +77,7 @@ window.bika.lims.controllers = {
// Clients
".portaltype-client.template-base_edit":
['ClientEditView'],

"div.overlay #client-base-edit":
['ClientOverlayHandler'],

Expand Down Expand Up @@ -147,7 +147,7 @@ window.bika.lims.controllers = {
".analysisrequest_add_by_col": ['AnalysisRequestAddByCol'],

"#ar_publish_container":
['AnalysisRequestPublishView', 'RangeGraph'],
['RangeGraph', 'AnalysisRequestPublishView'],

// Samples PrintView
"#preview_container.samples_print_preview":
Expand Down
21 changes: 11 additions & 10 deletions bika/lims/workflow/analysis/events.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@
from bika.lims.workflow import getCurrentState
from bika.lims.workflow import isBasicTransitionAllowed
from bika.lims.workflow import wasTransitionPerformed
from bika.lims.workflow import skip


def after_submit(obj):
Expand Down Expand Up @@ -98,10 +99,10 @@ def after_verify(obj):


def after_publish(obj):
if skip(self, "publish"):
if skip(obj, "publish"):
return
workflow = getToolByName(obj, "portal_workflow")
state = workflow.getInfoFor(self, 'cancellation_state', 'active')
state = workflow.getInfoFor(obj, 'cancellation_state', 'active')
if state == "cancelled":
return False
endtime = DateTime()
Expand All @@ -126,32 +127,32 @@ def after_publish(obj):


def after_cancel(obj):
    """Event handler fired after a 'cancel' transition on an analysis.

    If the analysis is currently assigned to a worksheet, it is removed
    from that worksheet before the object is reindexed.

    :param obj: the analysis object the transition was performed on.

    Note: this is a module-level handler, so the object must be referenced
    as ``obj`` — the pre-merge code used ``self`` here, which is undefined
    in this scope and raised NameError at runtime.
    """
    # Guard against re-entrant / duplicate firing of this transition.
    if skip(obj, "cancel"):
        return
    workflow = getToolByName(obj, "portal_workflow")
    # If it is assigned to a worksheet, unassign it.
    state = workflow.getInfoFor(obj, 'worksheetanalysis_review_state')
    if state == 'assigned':
        ws = obj.getWorksheet()
        # Clear the skip flag first so removeAnalysis() is not
        # short-circuited by the guard above.
        skip(obj, "cancel", unskip=True)
        ws.removeAnalysis(obj)
    obj.reindexObject()


def after_reject(obj):
    """Event handler fired after a 'reject' transition on an analysis.

    If the analysis is currently assigned to a worksheet, it is removed
    from that worksheet before the object is reindexed.

    :param obj: the analysis object the transition was performed on.

    Note: this is a module-level handler, so the object must be referenced
    as ``obj`` — the pre-merge code used ``self`` here, which is undefined
    in this scope and raised NameError at runtime.
    """
    # Guard against re-entrant / duplicate firing of this transition.
    if skip(obj, "reject"):
        return
    workflow = getToolByName(obj, "portal_workflow")
    # If it is assigned to a worksheet, unassign it.
    state = workflow.getInfoFor(obj, 'worksheetanalysis_review_state')
    if state == 'assigned':
        ws = obj.getWorksheet()
        ws.removeAnalysis(obj)
    obj.reindexObject()


def after_attach(obj):
if skip(self, "attach"):
if skip(obj, "attach"):
return
workflow = getToolByName(obj, "portal_workflow")
# If all analyses in this AR have been attached escalate the action
Expand Down