diff --git a/CHANGES.rst b/CHANGES.rst
index 73ba655edc..416583ce65 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -34,6 +34,7 @@ Changelog

 **Fixed**

+- #1453 Fix initial IDs not starting with 1
 - #1454 Fix occasional error when labeling samples w/o report as printed
 - #1452 Fix missing error percentage calculation for reference samples
 - #1447 New Client contact has access to last client's Sample only
diff --git a/bika/lims/browser/idserver/configure.zcml b/bika/lims/browser/idserver/configure.zcml
index a1883648fb..36fb77389b 100644
--- a/bika/lims/browser/idserver/configure.zcml
+++ b/bika/lims/browser/idserver/configure.zcml
@@ -20,13 +20,4 @@
       layer="bika.lims.interfaces.IBikaLIMS"
       />
-
-
diff --git a/bika/lims/browser/idserver/templates/numbergenerator.pt b/bika/lims/browser/idserver/templates/numbergenerator.pt
index 1c019ad136..e3d028628b 100644
--- a/bika/lims/browser/idserver/templates/numbergenerator.pt
+++ b/bika/lims/browser/idserver/templates/numbergenerator.pt
@@ -55,20 +55,12 @@
-
-
-
diff --git a/bika/lims/browser/idserver/view.py b/bika/lims/browser/idserver/view.py
index ece855878a..0b952bf4c9 100644
--- a/bika/lims/browser/idserver/view.py
+++ b/bika/lims/browser/idserver/view.py
@@ -78,13 +78,6 @@ def __call__(self):
                 message = _("Seeding key {} to {}".format(key, value))
                 self.add_status_message(message, "info")

-        # Handle "Flush" action
-        if form.get("flush", False):
-            message = _("Flushed Number Storage")
-            self.add_status_message(message, "warning")
-            self.flush()
-            return self.template()
-
         return self.template()

     def get_id_template_for(self, key):
@@ -129,10 +122,3 @@ def seed(self):

         new_seq = self.set_seed(prefix, seed)
         return 'IDServerView: "%s" seeded to %s' % (prefix, new_seq)
-
-    def flush(self):
-        """ Flush the storage
-        """
-        number_generator = getUtility(INumberGenerator)
-        number_generator.flush()
-        return "IDServerView: Number storage flushed!"
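With the "Flush" action removed from ``IDServerView`` (its browser registration, template markup and ``flush()`` method are all dropped above), the remaining way to adjust a counter from the ID server view is to seed it. The following is a minimal sketch, not part of the patch, of how a counter for a given portal type and prefix could be seeded programmatically; ``make_storage_key`` and ``set_number`` appear in this patch, while the import location of ``INumberGenerator`` is an assumption::

    from zope.component import getUtility

    from bika.lims.idserver import make_storage_key
    # NOTE: the import location of INumberGenerator is an assumption here
    from bika.lims.numbergenerator import INumberGenerator


    def reseed(portal_type, prefix, value):
        """Seed the counter for the given portal type/prefix to `value`,
        so that the next generated sequence number continues after it.
        """
        # build the same storage key the ID server uses internally
        key = make_storage_key(portal_type, prefix)
        number_generator = getUtility(INumberGenerator)
        number_generator.set_number(key, value)
        return key

How the prefix maps to the visible ID still depends on the configured ID format, so the arguments passed to such a helper are illustrative only.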
diff --git a/bika/lims/idserver.py b/bika/lims/idserver.py
index 323f789790..b815f84887 100644
--- a/bika/lims/idserver.py
+++ b/bika/lims/idserver.py
@@ -342,23 +342,6 @@ def get_current_year():
     return DateTime().strftime("%Y")[2:]


-def search_by_prefix(portal_type, prefix):
-    """Returns brains which share the same portal_type and ID prefix
-    """
-    catalog = api.get_tool("uid_catalog")
-    brains = catalog({"portal_type": portal_type})
-    # Filter brains with the same ID prefix
-    return filter(lambda brain: api.get_id(brain).startswith(prefix), brains)
-
-
-def get_ids_with_prefix(portal_type, prefix):
-    """Return a list of ids sharing the same portal type and prefix
-    """
-    brains = search_by_prefix(portal_type, prefix)
-    ids = map(api.get_id, brains)
-    return ids
-
-
 def make_storage_key(portal_type, prefix=None):
     """Make a storage (dict-) key for the number generator
     """
@@ -452,18 +435,6 @@ def get_generated_number(context, config, variables, **kw):
     # The key used for the storage
     key = make_storage_key(portal_type, prefix)

-    # Handle flushed storage
-    if key not in number_generator:
-        max_num = 0
-        existing = get_ids_with_prefix(portal_type, prefix)
-        numbers = map(lambda id: get_seq_number_from_id(id, id_template, prefix), existing)
-        # figure out the highest number in the sequence
-        if numbers:
-            max_num = max(numbers)
-        # set the number generator
-        logger.info("*** SEEDING Prefix '{}' to {}".format(prefix, max_num))
-        number_generator.set_number(key, max_num)
-
     if not kw.get("dry_run", False):
         # Generate a new number
         # NOTE Even when the number exceeds the given ID sequence format,
@@ -532,8 +503,10 @@ def renameAfterCreation(obj):
     """
     # Can't rename without a subtransaction commit when using portal_factory
     transaction.savepoint(optimistic=True)
+
     # The id returned should be normalized already
     new_id = None
+
     # Checking if an adapter exists for this content type. If yes, we will
     # get new_id from adapter.
     for name, adapter in getAdapters((obj, ), IIdServer):
@@ -544,17 +517,8 @@ def renameAfterCreation(obj):
     if not new_id:
         new_id = generateUniqueId(obj)

-    # TODO: This is a naive check just in current folder
-    # -> this should check globally for duplicate objects with same prefix
-    # N.B. a check like `search_by_prefix` each time would probably slow things
-    # down too much!
-    # -> A solution could be to store all IDs with a certain prefix in a storage
-    parent = api.get_parent(obj)
-    if new_id in parent.objectIds():
-        # XXX We could do the check in a `while` loop and generate a new one.
-        raise KeyError("The ID {} is already taken in the path {}".format(
-            new_id, api.get_path(parent)))

     # rename the object to the new id
+    parent = api.get_parent(obj)
     parent.manage_renameObject(obj.id, new_id)
     return new_id
diff --git a/bika/lims/tests/doctests/IDServer.rst b/bika/lims/tests/doctests/IDServer.rst
index 13ddccd80c..880a9a2ead 100644
--- a/bika/lims/tests/doctests/IDServer.rst
+++ b/bika/lims/tests/doctests/IDServer.rst
@@ -230,10 +230,6 @@ Re-seed and create a new `Batch`:
     >>> batch = api.create(batches, "Batch", ClientID="RB")
     >>> batch.getId() == "BA-{}-0011".format(year)
     True
-    >>> browser.open(portal_url + '/ng_flush')
-    >>> ar = create_analysisrequest(client, request, values, service_uids)
-    >>> ar.getId()
-    'RB-20170131-water-0002'

 Change ID formats and use alphanumeric ids:
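This is the core of #1453: whenever a storage key was missing, the removed fallback re-seeded the counter from whatever IDs with the same prefix already existed in the catalog, which could leave a fresh sequence starting somewhere other than 1 (and is why the flush doctest removed above no longer applies). With the fallback gone, the number generator's storage is the single source of truth and the first ID generated for a new key starts at 1. The counting behavior can be pictured with a plain dict-backed counter; this is only an illustrative stand-in, not the real persistent ``INumberGenerator`` utility, ``generate`` is a hypothetical method name and the key string is made up::

    from collections import defaultdict


    class DictNumberGenerator(object):
        """Illustrative stand-in for the persistent number generator."""

        def __init__(self):
            self.storage = defaultdict(int)

        def __contains__(self, key):
            # mirrors the `key not in number_generator` check removed above
            return key in self.storage

        def set_number(self, key, value):
            # mirrors number_generator.set_number(key, value) used for seeding
            self.storage[key] = value

        def generate(self, key):
            # hypothetical name; only the counting behavior matters here
            self.storage[key] += 1
            return self.storage[key]


    ng = DictNumberGenerator()
    print(ng.generate("batch-BA"))  # 1: a fresh key starts counting at 1
    print(ng.generate("batch-BA"))  # 2

Seeding via ``set_number`` (as the ID server view still offers) remains the explicit way to make a sequence continue from an existing value; it just no longer happens implicitly behind the scenes.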