diff --git a/attachment_db_by_checksum/README.rst b/attachment_db_by_checksum/README.rst new file mode 100644 index 0000000..29e75d1 --- /dev/null +++ b/attachment_db_by_checksum/README.rst @@ -0,0 +1,96 @@ +================================ +DB attachments saved by checksum +================================ + +.. + !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + !! This file is generated by oca-gen-addon-readme !! + !! changes will be overwritten. !! + !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + !! source digest: sha256:c73ebaaf529b35687e5e74d04c15be4c57e6fd38a47931733ced9bc6304b0b54 + !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + +.. |badge1| image:: https://img.shields.io/badge/maturity-Beta-yellow.png + :target: https://odoo-community.org/page/development-status + :alt: Beta +.. |badge2| image:: https://img.shields.io/badge/licence-LGPL--3-blue.png + :target: http://www.gnu.org/licenses/lgpl-3.0-standalone.html + :alt: License: LGPL-3 +.. |badge3| image:: https://img.shields.io/badge/github-OCA%2Fstorage-lightgray.png?logo=github + :target: https://github.com/OCA/storage/tree/14.0/attachment_db_by_checksum + :alt: OCA/storage +.. |badge4| image:: https://img.shields.io/badge/weblate-Translate%20me-F47D42.png + :target: https://translation.odoo-community.org/projects/storage-14-0/storage-14-0-attachment_db_by_checksum + :alt: Translate me on Weblate +.. |badge5| image:: https://img.shields.io/badge/runboat-Try%20me-875A7B.png + :target: https://runboat.odoo-community.org/builds?repo=OCA/storage&target_branch=14.0 + :alt: Try me on Runboat + +|badge1| |badge2| |badge3| |badge4| |badge5| + +Allow to identify database attachments through their hash, avoiding duplicates. + +This is typically useful when you want to save attachments to database but you want to save space avoiding to write the same content in several attachments (think of email attachments, for example, or any file uploaded more than once). + +**Table of contents** + +.. 
contents:: + :local: + +Configuration +============= + +Set system parameter ``ir_attachment.location`` to ``hashed_db`` to activate saving by checksum. + +Run ``force_storage``, method of ``ir.attachment``, to move existing attachments. + +Bug Tracker +=========== + +Bugs are tracked on `GitHub Issues `_. +In case of trouble, please check there if your issue has already been reported. +If you spotted it first, help us to smash it by providing a detailed and welcomed +`feedback `_. + +Do not contact contributors directly about support or help with technical issues. + +Credits +======= + +Authors +~~~~~~~ + +* TAKOBI + +Contributors +~~~~~~~~~~~~ + +* `TAKOBI `_: + + * Lorenzo Battistini + * Simone Rubino + +Maintainers +~~~~~~~~~~~ + +This module is maintained by the OCA. + +.. image:: https://odoo-community.org/logo.png + :alt: Odoo Community Association + :target: https://odoo-community.org + +OCA, or the Odoo Community Association, is a nonprofit organization whose +mission is to support the collaborative development of Odoo features and +promote its widespread use. + +.. |maintainer-eLBati| image:: https://github.com/eLBati.png?size=40px + :target: https://github.com/eLBati + :alt: eLBati + +Current `maintainer `__: + +|maintainer-eLBati| + +This module is part of the `OCA/storage `_ project on GitHub. + +You are welcome to contribute. To learn how please visit https://odoo-community.org/page/Contribute. diff --git a/attachment_db_by_checksum/__init__.py b/attachment_db_by_checksum/__init__.py new file mode 100644 index 0000000..2bc1433 --- /dev/null +++ b/attachment_db_by_checksum/__init__.py @@ -0,0 +1,3 @@ +# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl). + +from . 
import models diff --git a/attachment_db_by_checksum/__manifest__.py b/attachment_db_by_checksum/__manifest__.py new file mode 100644 index 0000000..10feb34 --- /dev/null +++ b/attachment_db_by_checksum/__manifest__.py @@ -0,0 +1,21 @@ +# Copyright 2021 Lorenzo Battistini @ TAKOBI +# Copyright 2023 Simone Rubino - TAKOBI +# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl). +{ + "name": "DB attachments saved by checksum", + "summary": "Allow to identify database attachments through their hash, avoiding duplicates", + "version": "14.0.1.0.0", + "category": "Storage", + "website": "https://github.com/OCA/storage", + "author": "TAKOBI, Odoo Community Association (OCA)", + "maintainers": [ + "eLBati", + ], + "license": "LGPL-3", + "depends": [ + "base", + ], + "data": [ + "security/ir.model.access.csv", + ], +} diff --git a/attachment_db_by_checksum/i18n/attachment_db_by_checksum.pot b/attachment_db_by_checksum/i18n/attachment_db_by_checksum.pot new file mode 100644 index 0000000..2de38d4 --- /dev/null +++ b/attachment_db_by_checksum/i18n/attachment_db_by_checksum.pot @@ -0,0 +1,88 @@ +# Translation of Odoo Server. +# This file contains the translation of the following modules: +# * attachment_db_by_checksum +# +msgid "" +msgstr "" +"Project-Id-Version: Odoo Server 14.0\n" +"Report-Msgid-Bugs-To: \n" +"Last-Translator: \n" +"Language-Team: \n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: \n" +"Plural-Forms: \n" + +#. module: attachment_db_by_checksum +#: model:ir.model,name:attachment_db_by_checksum.model_ir_attachment +msgid "Attachment" +msgstr "" + +#. module: attachment_db_by_checksum +#: model:ir.model,name:attachment_db_by_checksum.model_ir_attachment_content +msgid "Attachment content by hash" +msgstr "" + +#. module: attachment_db_by_checksum +#: model:ir.model.fields,help:attachment_db_by_checksum.field_ir_attachment_content__checksum +msgid "Checksum in the shape 2a/2a...\n" +msgstr "" + +#. 
module: attachment_db_by_checksum +#: model:ir.model.fields,field_description:attachment_db_by_checksum.field_ir_attachment_content__checksum +msgid "Checksum/SHA1" +msgstr "" + +#. module: attachment_db_by_checksum +#: model:ir.model.fields,field_description:attachment_db_by_checksum.field_ir_attachment_content__create_uid +msgid "Created by" +msgstr "" + +#. module: attachment_db_by_checksum +#: model:ir.model.fields,field_description:attachment_db_by_checksum.field_ir_attachment_content__create_date +msgid "Created on" +msgstr "" + +#. module: attachment_db_by_checksum +#: model:ir.model.fields,field_description:attachment_db_by_checksum.field_ir_attachment_content__db_datas +msgid "Database Data" +msgstr "" + +#. module: attachment_db_by_checksum +#: model:ir.model.fields,field_description:attachment_db_by_checksum.field_ir_attachment__display_name +#: model:ir.model.fields,field_description:attachment_db_by_checksum.field_ir_attachment_content__display_name +msgid "Display Name" +msgstr "" + +#. module: attachment_db_by_checksum +#: model:ir.model.fields,field_description:attachment_db_by_checksum.field_ir_attachment__id +#: model:ir.model.fields,field_description:attachment_db_by_checksum.field_ir_attachment_content__id +msgid "ID" +msgstr "" + +#. module: attachment_db_by_checksum +#: model:ir.model.fields,field_description:attachment_db_by_checksum.field_ir_attachment____last_update +#: model:ir.model.fields,field_description:attachment_db_by_checksum.field_ir_attachment_content____last_update +msgid "Last Modified on" +msgstr "" + +#. module: attachment_db_by_checksum +#: model:ir.model.fields,field_description:attachment_db_by_checksum.field_ir_attachment_content__write_uid +msgid "Last Updated by" +msgstr "" + +#. module: attachment_db_by_checksum +#: model:ir.model.fields,field_description:attachment_db_by_checksum.field_ir_attachment_content__write_date +msgid "Last Updated on" +msgstr "" + +#. 
module: attachment_db_by_checksum +#: code:addons/attachment_db_by_checksum/models/ir_attachment.py:0 +#, python-format +msgid "Only administrators can execute this action." +msgstr "" + +#. module: attachment_db_by_checksum +#: model:ir.model.constraint,message:attachment_db_by_checksum.constraint_ir_attachment_content_checksum_uniq +msgid "The checksum of the file must be unique!" +msgstr "" diff --git a/attachment_db_by_checksum/models/__init__.py b/attachment_db_by_checksum/models/__init__.py new file mode 100644 index 0000000..dc67916 --- /dev/null +++ b/attachment_db_by_checksum/models/__init__.py @@ -0,0 +1,4 @@ +# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl). + +from . import ir_attachment_content +from . import ir_attachment diff --git a/attachment_db_by_checksum/models/ir_attachment.py b/attachment_db_by_checksum/models/ir_attachment.py new file mode 100644 index 0000000..cb2dd32 --- /dev/null +++ b/attachment_db_by_checksum/models/ir_attachment.py @@ -0,0 +1,136 @@ +# Copyright 2023 Simone Rubino - TAKOBI +# License LGPL-3.0 or later (https://www.gnu.org/licenses/agpl). 
+ +import logging + +from odoo import _, api, models +from odoo.exceptions import AccessError +from odoo.osv import expression + +_logger = logging.getLogger(__name__) + +HASHED_STORAGE_PARAMETER = "hashed_db" + + +class Attachment(models.Model): + _inherit = "ir.attachment" + + @api.model + def _file_write_by_checksum(self, bin_value, checksum): + """Store attachment content in `Attachment content by hash`.""" + fname, full_path = self._get_path(bin_value, checksum) + attachment_content = self.env["ir.attachment.content"].search_by_checksum(fname) + if not attachment_content: + self.env["ir.attachment.content"].create( + { + "checksum": fname, + "db_datas": bin_value, + } + ) + return fname + + @api.model + def _file_write(self, bin_value, checksum): + location = self._storage() + if location == HASHED_STORAGE_PARAMETER: + return self._file_write_by_checksum(bin_value, checksum) + return super()._file_write(bin_value, checksum) + + @api.model + def _file_read_by_checksum(self, fname): + """Read attachment content from `Attachment content by hash`.""" + attachment_content = self.env["ir.attachment.content"].search_by_checksum(fname) + if attachment_content: + bin_value = attachment_content.db_datas + else: + # Fallback on standard behavior + _logger.debug("File %s not found" % fname) + bin_value = super()._file_read(fname) + return bin_value + + @api.model + def _file_read(self, fname): + location = self._storage() + if location == HASHED_STORAGE_PARAMETER: + return self._file_read_by_checksum(fname) + return super()._file_read(fname) + + @api.model + def _get_all_attachments_by_checksum_domain(self, fname=None): + """Get domain for finding all the attachments. + + If `checksum` is provided, + get domain for finding all the attachments having checksum `checksum`. 
+ """ + # trick to get every attachment, see _search method of ir.attachment + domain = [ + ("id", "!=", 0), + ] + if fname is not None: + checksum_domain = [ + ("store_fname", "=", fname), + ] + domain = expression.AND( + [ + domain, + checksum_domain, + ] + ) + return domain + + @api.model + def _get_all_attachments_by_checksum(self, fname=None): + """Get all attachments. + + If `checksum` is provided, + get all the attachments having checksum `checksum`. + """ + domain = self._get_all_attachments_by_checksum_domain(fname) + invisible_menu_context = { + "ir.ui.menu.full_list": True, + } + attachments = self.with_context(**invisible_menu_context).search(domain) + return attachments + + @api.model + def _file_delete_by_checksum(self, fname): + """Delete attachment content in `Attachment content by hash`.""" + attachments = self._get_all_attachments_by_checksum(fname=fname) + if not attachments: + attachment_content = self.env["ir.attachment.content"].search_by_checksum( + fname + ) + attachment_content.unlink() + + @api.model + def _file_delete(self, fname): + location = self._storage() + if location == HASHED_STORAGE_PARAMETER: + self._file_delete_by_checksum(fname) + return super()._file_delete(fname) + + @api.model + def force_storage_by_checksum(self): + """Copy all the attachments to `Attachment content by hash`.""" + if not self.env.is_admin(): + raise AccessError(_("Only administrators can execute this action.")) + + # we don't know if previous storage was file system or DB: + # we run for every attachment + all_attachments = self._get_all_attachments_by_checksum() + for attach in all_attachments: + attach.write( + { + "datas": attach.datas, + # do not try to guess mimetype overwriting existing value + "mimetype": attach.mimetype, + } + ) + return True + + @api.model + def force_storage(self): + location = self._storage() + if location == HASHED_STORAGE_PARAMETER: + return self.force_storage_by_checksum() + return super().force_storage() diff --git 
a/attachment_db_by_checksum/models/ir_attachment_content.py b/attachment_db_by_checksum/models/ir_attachment_content.py new file mode 100644 index 0000000..2df5c3d --- /dev/null +++ b/attachment_db_by_checksum/models/ir_attachment_content.py @@ -0,0 +1,43 @@ +from odoo import fields, models + + +class AttachmentContent(models.Model): + _name = "ir.attachment.content" + _rec_name = "checksum" + _description = "Attachment content by hash" + + checksum = fields.Char( + string="Checksum/SHA1", + help="Checksum in the shape 2a/2a...\n", + index=True, + readonly=True, + required=True, + ) + db_datas = fields.Binary( + string="Database Data", + attachment=False, + ) + + _sql_constraints = [ + ( + "checksum_uniq", + "unique(checksum)", + "The checksum of the file must be unique!", + ), + ] + + def search_by_checksum(self, fname): + """Get Attachment content, searching by `fname`. + + Note that `fname` is the relative path of the attachment + as it would be saved by the core, for example 2a/2a..., + this is the same value that we store + in field `ir.attachment.content.checksum`. + """ + attachment_content = self.env["ir.attachment.content"].search( + [ + ("checksum", "=", fname), + ], + limit=1, + ) + return attachment_content diff --git a/attachment_db_by_checksum/readme/CONFIGURE.rst b/attachment_db_by_checksum/readme/CONFIGURE.rst new file mode 100644 index 0000000..3e6d704 --- /dev/null +++ b/attachment_db_by_checksum/readme/CONFIGURE.rst @@ -0,0 +1,3 @@ +Set system parameter ``ir_attachment.location`` to ``hashed_db`` to activate saving by checksum. + +Run ``force_storage``, method of ``ir.attachment``, to move existing attachments. 
diff --git a/attachment_db_by_checksum/readme/CONTRIBUTORS.rst b/attachment_db_by_checksum/readme/CONTRIBUTORS.rst new file mode 100644 index 0000000..c337ddd --- /dev/null +++ b/attachment_db_by_checksum/readme/CONTRIBUTORS.rst @@ -0,0 +1,4 @@ +* `TAKOBI `_: + + * Lorenzo Battistini + * Simone Rubino diff --git a/attachment_db_by_checksum/readme/DESCRIPTION.rst b/attachment_db_by_checksum/readme/DESCRIPTION.rst new file mode 100644 index 0000000..e386591 --- /dev/null +++ b/attachment_db_by_checksum/readme/DESCRIPTION.rst @@ -0,0 +1,3 @@ +Allow to identify database attachments through their hash, avoiding duplicates. + +This is typically useful when you want to save attachments to database but you want to save space avoiding to write the same content in several attachments (think of email attachments, for example, or any file uploaded more than once). diff --git a/attachment_db_by_checksum/security/ir.model.access.csv b/attachment_db_by_checksum/security/ir.model.access.csv new file mode 100644 index 0000000..bedf4ff --- /dev/null +++ b/attachment_db_by_checksum/security/ir.model.access.csv @@ -0,0 +1,4 @@ +"id","name","model_id:id","group_id:id","perm_read","perm_write","perm_create","perm_unlink" +"access_ir_attachment_all","Everyone can read Attachment Contents","model_ir_attachment_content",,1,0,0,0 +"access_ir_attachment_group_user","Internal Users can manage Attachment Contents","model_ir_attachment_content","base.group_user",1,1,1,1 +"access_ir_attachment_portal","Portal Users can read and create Attachment Contents","model_ir_attachment_content","base.group_portal",1,0,1,0 diff --git a/attachment_db_by_checksum/static/description/icon.png b/attachment_db_by_checksum/static/description/icon.png new file mode 100644 index 0000000..3a0328b Binary files /dev/null and b/attachment_db_by_checksum/static/description/icon.png differ diff --git a/attachment_db_by_checksum/static/description/index.html b/attachment_db_by_checksum/static/description/index.html new 
file mode 100644 index 0000000..bd57551 --- /dev/null +++ b/attachment_db_by_checksum/static/description/index.html @@ -0,0 +1,434 @@ + + + + + + +DB attachments saved by checksum + + + +
+

DB attachments saved by checksum

+ + +

Beta License: LGPL-3 OCA/storage Translate me on Weblate Try me on Runboat

+

Allow to identify database attachments through their hash, avoiding duplicates.

+

This is typically useful when you want to save attachments to database but you want to save space avoiding to write the same content in several attachments (think of email attachments, for example, or any file uploaded more than once).

+

Table of contents

+ +
+

Configuration

+

Set system parameter ir_attachment.location to hashed_db to activate saving by checksum.

+

Run force_storage, method of ir.attachment, to move existing attachments.

+
+
+

Bug Tracker

+

Bugs are tracked on GitHub Issues. +In case of trouble, please check there if your issue has already been reported. +If you spotted it first, help us to smash it by providing a detailed and welcomed +feedback.

+

Do not contact contributors directly about support or help with technical issues.

+
+
+

Credits

+
+

Authors

+
    +
  • TAKOBI
  • +
+
+
+

Contributors

+ +
+
+

Maintainers

+

This module is maintained by the OCA.

+Odoo Community Association +

OCA, or the Odoo Community Association, is a nonprofit organization whose +mission is to support the collaborative development of Odoo features and +promote its widespread use.

+

Current maintainer:

+

eLBati

+

This module is part of the OCA/storage project on GitHub.

+

You are welcome to contribute. To learn how please visit https://odoo-community.org/page/Contribute.

+
+
+
+ + diff --git a/attachment_db_by_checksum/tests/__init__.py b/attachment_db_by_checksum/tests/__init__.py new file mode 100644 index 0000000..c6ba00b --- /dev/null +++ b/attachment_db_by_checksum/tests/__init__.py @@ -0,0 +1,3 @@ +# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl). + +from . import test_attachment_by_checksum diff --git a/attachment_db_by_checksum/tests/test_attachment_by_checksum.py b/attachment_db_by_checksum/tests/test_attachment_by_checksum.py new file mode 100644 index 0000000..3ea3eb4 --- /dev/null +++ b/attachment_db_by_checksum/tests/test_attachment_by_checksum.py @@ -0,0 +1,118 @@ +# Copyright 2023 Simone Rubino - TAKOBI +# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl). + +import base64 + +from odoo.tests import SavepointCase + +from odoo.addons.attachment_db_by_checksum.models.ir_attachment import ( + HASHED_STORAGE_PARAMETER, +) + + +class TestAttachmentByChecksum(SavepointCase): + @classmethod + def setUpClass(cls): + super().setUpClass() + + cls.data = b"Test attachment data" + cls.attachment = cls.env["ir.attachment"].create( + { + "name": "Test attachment", + "datas": base64.b64encode(cls.data), + } + ) + # Save the fname (a2/a2...) of the attachment + # so that we can use it in tests where the attachment is deleted + cls.fname = cls.attachment.store_fname + + @classmethod + def _set_hashed_db_storage(cls): + """Set `hashed_db` Attachment Storage.""" + cls.env["ir.config_parameter"].set_param( + "ir_attachment.location", + HASHED_STORAGE_PARAMETER, + ) + + def test_force_storage(self): + """Move storage from default to `hashed_db`: + attachments are copied in `Attachment content by hash` records. 
+ """ + # Arrange: Create an attachment + data = self.data + fname = self.fname + attachment = self.attachment + # pre-condition: The storage is not `hashed_db` + self.assertNotEqual( + self.env["ir.attachment"]._storage(), HASHED_STORAGE_PARAMETER + ) + self.assertEqual(attachment.raw, data) + + # Act: Move the storage + self._set_hashed_db_storage() + self.env["ir.attachment"].force_storage() + + # Assert: The attachment value is both in the attachment + # and in the Attachment content by hash + self.assertEqual(self.env["ir.attachment"]._storage(), HASHED_STORAGE_PARAMETER) + self.assertEqual(attachment.raw, data) + attachment_content = self.env["ir.attachment.content"].search_by_checksum(fname) + self.assertEqual(attachment_content.db_datas, data) + + def test_new_hashed_attachment(self): + """Storage is `hashed_db`: + new attachments are only stored in `Attachment content by hash` records. + """ + # Arrange: Set the storage to `hashed_db` + data = self.data + fname = self.fname + self.attachment.unlink() + self._set_hashed_db_storage() + # pre-condition + self.assertEqual(self.env["ir.attachment"]._storage(), HASHED_STORAGE_PARAMETER) + + # Act: Create an attachment + self.env["ir.attachment"].create( + { + "name": "Test attachment", + "datas": base64.b64encode(data), + } + ) + + # Assert: The new attachment value is in the Attachment content by hash + self.assertEqual(self.env["ir.attachment"]._storage(), HASHED_STORAGE_PARAMETER) + attachment_content = self.env["ir.attachment.content"].search_by_checksum(fname) + self.assertEqual(attachment_content.db_datas, data) + + def test_force_storage_invisible_menu(self): + """Move storage from default to `hashed_db`: + attachments linked to invisible menus + are copied in `Attachment content by hash` records. 
+ """ + # Arrange: Create a menu invisible for current user + fname = self.fname + self.attachment.unlink() + menu_model = self.env["ir.ui.menu"] + invisible_menu = menu_model.create( + { + "name": "Test invisible menu", + "web_icon_data": base64.b64encode(self.data), + "groups_id": [(6, 0, self.env.ref("base.group_no_one").ids)], + } + ) + # pre-condition: The menu is invisible and storage is not `hashed_db` + self.assertNotEqual( + self.env["ir.attachment"]._storage(), HASHED_STORAGE_PARAMETER + ) + self.assertNotIn(invisible_menu, menu_model.search([])) + + # Act: Move the storage to `hashed_db` + self._set_hashed_db_storage() + self.env["ir.attachment"].with_user( + self.env.ref("base.user_admin") + ).force_storage() + + # Assert: The menu's attachment value is in the Attachment content by hash + self.assertEqual(self.env["ir.attachment"]._storage(), HASHED_STORAGE_PARAMETER) + attachment_content = self.env["ir.attachment.content"].search_by_checksum(fname) + self.assertTrue(attachment_content) diff --git a/storage_file/README.rst b/storage_file/README.rst new file mode 100644 index 0000000..6d6501d --- /dev/null +++ b/storage_file/README.rst @@ -0,0 +1,41 @@ + +.. image:: https://img.shields.io/badge/licence-LGPL--3-blue.svg + :target: http://www.gnu.org/licenses/lgpl-3.0-standalone.html + :alt: License: LGPL-3 + +============ +Storage File +============ + + +External file management depending on Storage Backend module. 
 + +It includes these features: +* link to any Odoo model/record +* store metadata like: checksum, mimetype + +Use cases (with help of additional modules): +- store pdf (like invoices) on a file server with high SLA +- access attachments with read/write on prod environment and only read only on dev / testing + +Known issues / Roadmap +====================== + +* Update README with the last model of README when migration to v11 in OCA +* No file deletion / unlink + +Credits +======= + + +Contributors +------------ + +* Sebastien Beau +* Raphaël Reverdy + + +Maintainer +---------- + +* Akretion diff --git a/storage_file/__init__.py b/storage_file/__init__.py new file mode 100644 index 0000000..ada0b87 --- /dev/null +++ b/storage_file/__init__.py @@ -0,0 +1,3 @@ +from . import controllers +from . import models +from . import wizards diff --git a/storage_file/__manifest__.py b/storage_file/__manifest__.py new file mode 100644 index 0000000..30f9890 --- /dev/null +++ b/storage_file/__manifest__.py @@ -0,0 +1,26 @@ +# Copyright 2017 Akretion (http://www.akretion.com). +# @author Sébastien BEAU +# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl). 
+ +{ + "name": "Storage File", + "summary": "Storage file in storage backend", + "version": "14.0.2.4.0", + "category": "Storage", + "website": "https://github.com/OCA/storage", + "author": " Akretion, Odoo Community Association (OCA)", + "license": "LGPL-3", + "development_status": "Production/Stable", + "application": False, + "installable": True, + "external_dependencies": {"python": ["python_slugify"]}, + "depends": ["storage_backend"], + "data": [ + "views/storage_file_view.xml", + "views/storage_backend_view.xml", + "security/ir.model.access.csv", + "security/storage_file.xml", + "data/ir_cron.xml", + "data/storage_backend.xml", + ], +} diff --git a/storage_file/controllers/__init__.py b/storage_file/controllers/__init__.py new file mode 100644 index 0000000..12a7e52 --- /dev/null +++ b/storage_file/controllers/__init__.py @@ -0,0 +1 @@ +from . import main diff --git a/storage_file/controllers/main.py b/storage_file/controllers/main.py new file mode 100644 index 0000000..1417631 --- /dev/null +++ b/storage_file/controllers/main.py @@ -0,0 +1,39 @@ +# Part of Odoo. See LICENSE file for full copyright and licensing details. 
+ +import base64 + +import werkzeug.utils +import werkzeug.wrappers + +from odoo import http +from odoo.http import request + + +class StorageFileController(http.Controller): + @http.route( + ["/storage.file/"], type="http", auth="public" + ) + def content_common(self, slug_name_with_id, token=None, download=None, **kw): + storage_file = request.env["storage.file"].get_from_slug_name_with_id( + slug_name_with_id + ) + status, headers, content = request.env["ir.http"].binary_content( + model=storage_file._name, + id=storage_file.id, + field="data", + filename_field="name", + download=download, + ) + if status == 304: + response = werkzeug.wrappers.Response(status=status, headers=headers) + elif status == 301: + return werkzeug.utils.redirect(content, code=301) + elif status != 200: + response = request.not_found() + else: + content_base64 = base64.b64decode(content) + headers.append(("Content-Length", len(content_base64))) + response = request.make_response(content_base64, headers) + if token: + response.set_cookie("fileToken", token) + return response diff --git a/storage_file/data/ir_cron.xml b/storage_file/data/ir_cron.xml new file mode 100644 index 0000000..297a92c --- /dev/null +++ b/storage_file/data/ir_cron.xml @@ -0,0 +1,15 @@ + + + + Clean Storage File + + + 1 + days + -1 + + + code + model._clean_storage_file() + + diff --git a/storage_file/data/storage_backend.xml b/storage_file/data/storage_backend.xml new file mode 100644 index 0000000..cb39acf --- /dev/null +++ b/storage_file/data/storage_backend.xml @@ -0,0 +1,8 @@ + + + + name_with_id + odoo + + + diff --git a/storage_file/i18n/storage_file.pot b/storage_file/i18n/storage_file.pot new file mode 100644 index 0000000..e8d73ac --- /dev/null +++ b/storage_file/i18n/storage_file.pot @@ -0,0 +1,361 @@ +# Translation of Odoo Server. 
+# This file contains the translation of the following modules: +# * storage_file +# +msgid "" +msgstr "" +"Project-Id-Version: Odoo Server 14.0\n" +"Report-Msgid-Bugs-To: \n" +"Last-Translator: \n" +"Language-Team: \n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: \n" +"Plural-Forms: \n" + +#. module: storage_file +#: model:ir.model.fields,help:storage_file.field_storage_file__url_path +msgid "Accessible path, no base URL" +msgstr "" + +#. module: storage_file +#: model:ir.model.fields,field_description:storage_file.field_storage_file__active +msgid "Active" +msgstr "" + +#. module: storage_file +#: model:ir.model.fields,field_description:storage_file.field_storage_backend__backend_view_use_internal_url +msgid "Backend View Use Internal Url" +msgstr "" + +#. module: storage_file +#: model_terms:ir.ui.view,arch_db:storage_file.storage_backend_view_form +msgid "Base URL used for files" +msgstr "" + +#. module: storage_file +#: model:ir.model.fields,field_description:storage_file.field_storage_backend__base_url +msgid "Base Url" +msgstr "" + +#. module: storage_file +#: model:ir.model.fields,field_description:storage_file.field_storage_backend__base_url_for_files +msgid "Base Url For Files" +msgstr "" + +#. module: storage_file +#: model:ir.model.fields,field_description:storage_file.field_storage_file__checksum +msgid "Checksum/SHA1" +msgstr "" + +#. module: storage_file +#: model:ir.actions.server,name:storage_file.ir_cron_clean_storage_file_ir_actions_server +#: model:ir.cron,cron_name:storage_file.ir_cron_clean_storage_file +#: model:ir.cron,name:storage_file.ir_cron_clean_storage_file +msgid "Clean Storage File" +msgstr "" + +#. module: storage_file +#: model:ir.model.fields,field_description:storage_file.field_storage_file__company_id +msgid "Company" +msgstr "" + +#. 
module: storage_file +#: model:ir.model.fields,field_description:storage_file.field_storage_file__create_uid +#: model:ir.model.fields,field_description:storage_file.field_storage_file_replace__create_uid +msgid "Created by" +msgstr "" + +#. module: storage_file +#: model:ir.model.fields,field_description:storage_file.field_storage_file__create_date +#: model:ir.model.fields,field_description:storage_file.field_storage_file_replace__create_date +msgid "Created on" +msgstr "" + +#. module: storage_file +#: model:ir.model.fields,field_description:storage_file.field_storage_file__data +#: model:ir.model.fields,field_description:storage_file.field_storage_file_replace__data +#: model:ir.model.fields,help:storage_file.field_storage_file__data +msgid "Data" +msgstr "" + +#. module: storage_file +#: model:ir.model.fields,help:storage_file.field_storage_backend__backend_view_use_internal_url +msgid "" +"Decide if Odoo backend views should use the external URL (usually a CDN) or " +"the internal url with direct access to the storage. This could save you some" +" money if you pay by CDN traffic." +msgstr "" + +#. module: storage_file +#: model:ir.model.fields,help:storage_file.field_storage_backend__is_public +msgid "" +"Define if every files stored into this backend are public or not. Examples:\n" +"Private: your file/image can not be displayed is the user is not logged (not available on other website);\n" +"Public: your file/image can be displayed if nobody is logged (useful to display files on external websites)" +msgstr "" + +#. module: storage_file +#: model:ir.model.fields,field_description:storage_file.field_ir_actions_report__display_name +#: model:ir.model.fields,field_description:storage_file.field_storage_backend__display_name +#: model:ir.model.fields,field_description:storage_file.field_storage_file__display_name +#: model:ir.model.fields,field_description:storage_file.field_storage_file_replace__display_name +msgid "Display Name" +msgstr "" + +#. 
module: storage_file +#: model:ir.model.fields,field_description:storage_file.field_storage_file__extension +msgid "Extension" +msgstr "" + +#. module: storage_file +#: model:ir.model.fields.selection,name:storage_file.selection__storage_backend__served_by__external +msgid "External" +msgstr "" + +#. module: storage_file +#: model:ir.actions.act_window,name:storage_file.act_open_storage_file_view +#: model:ir.model.fields,field_description:storage_file.field_storage_file_replace__file_id +#: model:ir.ui.menu,name:storage_file.menu_storage_file +#: model_terms:ir.ui.view,arch_db:storage_file.storage_file_view_form +#: model_terms:ir.ui.view,arch_db:storage_file.storage_file_view_search +msgid "File" +msgstr "" + +#. module: storage_file +#: model:ir.model.fields,field_description:storage_file.field_storage_file_replace__file_name +msgid "File Name" +msgstr "" + +#. module: storage_file +#: model:ir.model.fields,field_description:storage_file.field_storage_file__file_size +msgid "File Size" +msgstr "" + +#. module: storage_file +#: model:ir.model.fields,field_description:storage_file.field_storage_file__file_type +msgid "File Type" +msgstr "" + +#. module: storage_file +#: code:addons/storage_file/models/storage_file.py:0 +#, python-format +msgid "File can not be updated,remove it and create a new one" +msgstr "" + +#. module: storage_file +#: model:ir.model.fields,field_description:storage_file.field_storage_backend__filename_strategy +msgid "Filename Strategy" +msgstr "" + +#. module: storage_file +#: model:ir.model.fields,field_description:storage_file.field_storage_file__filename +msgid "Filename without extension" +msgstr "" + +#. module: storage_file +#: model:ir.model.fields,help:storage_file.field_storage_file__internal_url +msgid "HTTP URL to load the file directly from storage." +msgstr "" + +#. module: storage_file +#: model:ir.model.fields,help:storage_file.field_storage_file__url +msgid "HTTP accessible path to the file" +msgstr "" + +#. 
module: storage_file +#: model:ir.model.fields,field_description:storage_file.field_storage_file__human_file_size +msgid "Human File Size" +msgstr "" + +#. module: storage_file +#: model:ir.model.fields,field_description:storage_file.field_ir_actions_report__id +#: model:ir.model.fields,field_description:storage_file.field_storage_backend__id +#: model:ir.model.fields,field_description:storage_file.field_storage_file__id +#: model:ir.model.fields,field_description:storage_file.field_storage_file_replace__id +msgid "ID" +msgstr "" + +#. module: storage_file +#: model_terms:ir.ui.view,arch_db:storage_file.storage_backend_view_form +msgid "" +"If you have changed parameters via server env settings the URL might look " +"outdated." +msgstr "" + +#. module: storage_file +#: model:ir.model.fields,field_description:storage_file.field_storage_file__internal_url +msgid "Internal Url" +msgstr "" + +#. module: storage_file +#: model:ir.model.fields,field_description:storage_file.field_storage_backend__is_public +msgid "Is Public" +msgstr "" + +#. module: storage_file +#: model:ir.model.fields,field_description:storage_file.field_ir_actions_report____last_update +#: model:ir.model.fields,field_description:storage_file.field_storage_backend____last_update +#: model:ir.model.fields,field_description:storage_file.field_storage_file____last_update +#: model:ir.model.fields,field_description:storage_file.field_storage_file_replace____last_update +msgid "Last Modified on" +msgstr "" + +#. module: storage_file +#: model:ir.model.fields,field_description:storage_file.field_storage_file__write_uid +#: model:ir.model.fields,field_description:storage_file.field_storage_file_replace__write_uid +msgid "Last Updated by" +msgstr "" + +#. module: storage_file +#: model:ir.model.fields,field_description:storage_file.field_storage_file__write_date +#: model:ir.model.fields,field_description:storage_file.field_storage_file_replace__write_date +msgid "Last Updated on" +msgstr "" + +#. 
module: storage_file +#: model:ir.model.fields,field_description:storage_file.field_storage_file__mimetype +msgid "Mime Type" +msgstr "" + +#. module: storage_file +#: model:ir.model.fields,field_description:storage_file.field_storage_file__name +msgid "Name" +msgstr "" + +#. module: storage_file +#: model:ir.model.fields.selection,name:storage_file.selection__storage_backend__filename_strategy__name_with_id +msgid "Name and ID" +msgstr "" + +#. module: storage_file +#: model:ir.model.fields,help:storage_file.field_storage_backend__url_include_directory_path +msgid "" +"Normally the directory_path it's for internal usage. If this flag is enabled" +" the path will be used to compute the public URL." +msgstr "" + +#. module: storage_file +#: model:ir.model.fields.selection,name:storage_file.selection__storage_backend__served_by__odoo +msgid "Odoo" +msgstr "" + +#. module: storage_file +#: model_terms:ir.ui.view,arch_db:storage_file.storage_backend_view_form +msgid "Recompute base URL for files" +msgstr "" + +#. module: storage_file +#: model:ir.model.fields,field_description:storage_file.field_storage_file__relative_path +msgid "Relative Path" +msgstr "" + +#. module: storage_file +#: model:ir.model.fields,help:storage_file.field_storage_file__relative_path +msgid "Relative location for backend" +msgstr "" + +#. module: storage_file +#: model:ir.model,name:storage_file.model_ir_actions_report +msgid "Report Action" +msgstr "" + +#. module: storage_file +#: model:ir.model.fields.selection,name:storage_file.selection__storage_backend__filename_strategy__hash +msgid "SHA hash" +msgstr "" + +#. module: storage_file +#: model:ir.model.fields,field_description:storage_file.field_storage_backend__served_by +msgid "Served By" +msgstr "" + +#. module: storage_file +#: model_terms:ir.ui.view,arch_db:storage_file.storage_backend_view_form +msgid "" +"Served by Odoo option will use `web.base.url` as the base URL.\n" +"
Make sure this parameter is properly configured and accessible\n" +" from everwhere you want to access the service." +msgstr "" + +#. module: storage_file +#: model:ir.model.fields,field_description:storage_file.field_storage_file__slug +msgid "Slug" +msgstr "" + +#. module: storage_file +#: model:ir.model.fields,help:storage_file.field_storage_file__slug +msgid "Slug-ified name with ID for URL" +msgstr "" + +#. module: storage_file +#: model:ir.model.fields,field_description:storage_file.field_storage_file__backend_id +msgid "Storage" +msgstr "" + +#. module: storage_file +#: model:ir.model,name:storage_file.model_storage_backend +msgid "Storage Backend" +msgstr "" + +#. module: storage_file +#: model:ir.model,name:storage_file.model_storage_file +msgid "Storage File" +msgstr "" + +#. module: storage_file +#: model:ir.model.fields,help:storage_file.field_storage_backend__filename_strategy +msgid "" +"Strategy to build the name of the file to be stored.\n" +"Name and ID: will store the file with its name + its id.\n" +"SHA Hash: will use the hash of the file as filename (same method as the native attachment storage)" +msgstr "" + +#. module: storage_file +#: code:addons/storage_file/models/storage_file.py:0 +#, python-format +msgid "" +"The filename strategy is empty for the backend %s.\n" +"Please configure it" +msgstr "" + +#. module: storage_file +#: model:ir.model.constraint,message:storage_file.constraint_storage_file_path_uniq +msgid "The private path must be uniq per backend" +msgstr "" + +#. module: storage_file +#: model:ir.model.fields,field_description:storage_file.field_storage_file__to_delete +msgid "To Delete" +msgstr "" + +#. module: storage_file +#: model:ir.model.fields,field_description:storage_file.field_storage_file__url +msgid "Url" +msgstr "" + +#. module: storage_file +#: model:ir.model.fields,field_description:storage_file.field_storage_backend__url_include_directory_path +msgid "Url Include Directory Path" +msgstr "" + +#. 
module: storage_file +#: model:ir.model.fields,field_description:storage_file.field_storage_file__url_path +msgid "Url Path" +msgstr "" + +#. module: storage_file +#: model_terms:ir.ui.view,arch_db:storage_file.storage_backend_view_form +msgid "" +"When served by external service you might have special environment configuration\n" +" for building final files URLs.\n" +"
For performance reasons, the base URL is computed and stored.\n" +" If you change some parameters (eg: in local dev environment or special instances)\n" +" and you still want to see the images you might need to refresh this URL\n" +" to make sure images and/or files are loaded correctly." +msgstr "" + +#. module: storage_file +#: model:ir.model,name:storage_file.model_storage_file_replace +msgid "Wizard template allowing to replace a storage.file" +msgstr "" diff --git a/storage_file/models/__init__.py b/storage_file/models/__init__.py new file mode 100644 index 0000000..49b6a5d --- /dev/null +++ b/storage_file/models/__init__.py @@ -0,0 +1,3 @@ +from . import storage_file +from . import storage_backend +from . import ir_actions_report diff --git a/storage_file/models/ir_actions_report.py b/storage_file/models/ir_actions_report.py new file mode 100644 index 0000000..5285c03 --- /dev/null +++ b/storage_file/models/ir_actions_report.py @@ -0,0 +1,14 @@ +# Copyright 2021 Camptocamp SA (http://www.camptocamp.com). +# @author Simone Orsi +# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl). + +from odoo import models + + +class IrActionsReport(models.Model): + _inherit = "ir.actions.report" + + def render_qweb_pdf(self, res_ids=None, data=None): + return super( + IrActionsReport, self.with_context(print_report_pdf=True) + ).render_qweb_pdf(res_ids=res_ids, data=data) diff --git a/storage_file/models/storage_backend.py b/storage_file/models/storage_backend.py new file mode 100644 index 0000000..a9d64e1 --- /dev/null +++ b/storage_file/models/storage_backend.py @@ -0,0 +1,170 @@ +# Copyright 2017 Akretion (http://www.akretion.com). +# @author Sébastien BEAU +# Copyright 2019 Camptocamp SA (http://www.camptocamp.com). +# @author Simone Orsi +# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl). 
+ +import logging + +from odoo import api, fields, models + +_logger = logging.getLogger(__name__) + + +class StorageBackend(models.Model): + _inherit = "storage.backend" + + filename_strategy = fields.Selection( + selection=[("name_with_id", "Name and ID"), ("hash", "SHA hash")], + default="name_with_id", + help=( + "Strategy to build the name of the file to be stored.\n" + "Name and ID: will store the file with its name + its id.\n" + "SHA Hash: will use the hash of the file as filename " + "(same method as the native attachment storage)" + ), + ) + served_by = fields.Selection( + selection=[("odoo", "Odoo"), ("external", "External")], + required=True, + default="odoo", + ) + base_url = fields.Char(default="") + is_public = fields.Boolean( + default=False, + help="Define if every files stored into this backend are " + "public or not. Examples:\n" + "Private: your file/image can not be displayed is the user is " + "not logged (not available on other website);\n" + "Public: your file/image can be displayed if nobody is " + "logged (useful to display files on external websites)", + ) + url_include_directory_path = fields.Boolean( + default=False, + help="Normally the directory_path it's for internal usage. " + "If this flag is enabled " + "the path will be used to compute the public URL.", + ) + base_url_for_files = fields.Char(compute="_compute_base_url_for_files", store=True) + backend_view_use_internal_url = fields.Boolean( + help="Decide if Odoo backend views should use the external URL (usually a CDN) " + "or the internal url with direct access to the storage. " + "This could save you some money if you pay by CDN traffic." 
+ ) + + def write(self, vals): + # Ensure storage file URLs are up to date + clear_url_cache = False + url_related_fields = ( + "served_by", + "base_url", + "directory_path", + "url_include_directory_path", + ) + for fname in url_related_fields: + if fname in vals: + clear_url_cache = True + break + res = super().write(vals) + if clear_url_cache: + self.action_recompute_base_url_for_files() + return res + + @property + def _server_env_fields(self): + env_fields = super()._server_env_fields + env_fields.update( + { + "filename_strategy": {}, + "served_by": {}, + "base_url": {}, + "url_include_directory_path": {}, + } + ) + return env_fields + + _default_backend_xid = "storage_backend.default_storage_backend" + + @classmethod + def _get_backend_id_from_param(cls, env, param_name, default_fallback=True): + backend_id = None + param = env["ir.config_parameter"].sudo().get_param(param_name) + if param: + if param.isdigit(): + backend_id = int(param) + elif "." in param: + backend = env.ref(param, raise_if_not_found=False) + if backend: + backend_id = backend.id + if not backend_id and default_fallback: + backend = env.ref(cls._default_backend_xid, raise_if_not_found=False) + if backend: + backend_id = backend.id + else: + _logger.warn("No backend found, no default fallback found.") + return backend_id + + @api.depends( + "served_by", + "base_url", + "directory_path", + "url_include_directory_path", + ) + def _compute_base_url_for_files(self): + for record in self: + record.base_url_for_files = record._get_base_url_for_files() + + def _get_base_url_for_files(self): + """Retrieve base URL for files.""" + backend = self.sudo() + parts = [] + if backend.served_by == "external": + parts = [backend.base_url or ""] + if backend.url_include_directory_path and backend.directory_path: + parts.append(backend.directory_path) + return "/".join(parts) + + def action_recompute_base_url_for_files(self): + """Refresh base URL for files. 
+ + Rationale: all the params for computing this URL might come from server env. + When this is the case, the URL - being stored - might be out of date. + This is because changes to server env fields are not detected at startup. + Hence, let's offer an easy way to promptly force this manually when needed. + """ + self._compute_base_url_for_files() + self.env["storage.file"].invalidate_cache(["url"]) + + def _get_base_url_from_param(self): + base_url_param = ( + "report.url" if self.env.context.get("print_report_pdf") else "web.base.url" + ) + return self.env["ir.config_parameter"].sudo().get_param(base_url_param) + + def _get_url_for_file(self, storage_file, exclude_base_url=False): + """Return final full URL for given file.""" + backend = self.sudo() + if backend.served_by == "odoo": + parts = [ + self._get_base_url_from_param() if not exclude_base_url else "/", + "storage.file", + storage_file.slug, + ] + else: + base_url = backend.base_url_for_files + if exclude_base_url: + base_url = base_url.replace(backend.base_url, "") or "/" + parts = [base_url, storage_file.relative_path or ""] + return "/".join([x.rstrip("/") for x in parts if x]) + + def _register_hook(self): + super()._register_hook() + backends = self.search([]).filtered( + lambda x: x._get_base_url_for_files() != x.base_url_for_files + ) + if not backends: + return + sql = f"SELECT id FROM {self._table} WHERE ID IN %s FOR UPDATE" + self.env.cr.execute(sql, (tuple(backends.ids),), log_exceptions=False) + backends.action_recompute_base_url_for_files() + _logger.info("storage.backend base URL for files refreshed") diff --git a/storage_file/models/storage_file.py b/storage_file/models/storage_file.py new file mode 100644 index 0000000..a359311 --- /dev/null +++ b/storage_file/models/storage_file.py @@ -0,0 +1,238 @@ +# Copyright 2017 Akretion (http://www.akretion.com). +# @author Sébastien BEAU +# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl). 
+ +import base64 +import hashlib +import logging +import mimetypes +import os +import re + +from odoo import api, fields, models +from odoo.exceptions import UserError +from odoo.tools import human_size +from odoo.tools.translate import _ + +_logger = logging.getLogger(__name__) + +try: + from slugify import slugify +except ImportError: # pragma: no cover + _logger.debug("Cannot `import slugify`.") + + +REGEX_SLUGIFY = r"[^-a-z0-9_]+" + + +class StorageFile(models.Model): + _name = "storage.file" + _description = "Storage File" + + name = fields.Char(required=True, index=True) + backend_id = fields.Many2one( + "storage.backend", "Storage", index=True, required=True + ) + url = fields.Char(compute="_compute_url", help="HTTP accessible path to the file") + url_path = fields.Char( + compute="_compute_url_path", help="Accessible path, no base URL" + ) + internal_url = fields.Char( + compute="_compute_internal_url", + help="HTTP URL to load the file directly from storage.", + ) + slug = fields.Char( + compute="_compute_slug", help="Slug-ified name with ID for URL", store=True + ) + relative_path = fields.Char( + readonly=True, help="Relative location for backend", copy=False + ) + file_size = fields.Integer("File Size") + human_file_size = fields.Char( + "Human File Size", compute="_compute_human_file_size", store=True + ) + checksum = fields.Char("Checksum/SHA1", size=40, index=True, readonly=True) + filename = fields.Char( + "Filename without extension", compute="_compute_extract_filename", store=True + ) + extension = fields.Char( + "Extension", compute="_compute_extract_filename", store=True + ) + mimetype = fields.Char("Mime Type", compute="_compute_extract_filename", store=True) + data = fields.Binary( + help="Data", + inverse="_inverse_data", + compute="_compute_data", + store=False, + copy=True, + ) + to_delete = fields.Boolean() + active = fields.Boolean(default=True) + company_id = fields.Many2one( + "res.company", "Company", default=lambda self: 
self.env.user.company_id.id + ) + file_type = fields.Selection([]) + + _sql_constraints = [ + ( + "path_uniq", + "unique(relative_path, backend_id)", + "The private path must be uniq per backend", + ) + ] + + def write(self, vals): + if "data" in vals: + for record in self: + if record.data: + raise UserError( + _("File can not be updated," "remove it and create a new one") + ) + return super(StorageFile, self).write(vals) + + @api.depends("file_size") + def _compute_human_file_size(self): + for record in self: + record.human_file_size = human_size(record.file_size) + + @api.depends("filename", "extension") + def _compute_slug(self): + for record in self: + record.slug = record._slugify_name_with_id() + + def _slugify_name_with_id(self): + return "{}{}".format( + slugify( + "{}-{}".format(self.filename, self.id), regex_pattern=REGEX_SLUGIFY + ), + self.extension, + ) + + def _build_relative_path(self, checksum): + self.ensure_one() + strategy = self.sudo().backend_id.filename_strategy + if not strategy: + raise UserError( + _( + "The filename strategy is empty for the backend %s.\n" + "Please configure it" + ) + % self.backend_id.name + ) + if strategy == "hash": + return checksum[:2] + "/" + checksum + elif strategy == "name_with_id": + return self.slug + + def _prepare_meta_for_file(self): + bin_data = base64.b64decode(self.data) + checksum = hashlib.sha1(bin_data).hexdigest() + relative_path = self._build_relative_path(checksum) + return { + "checksum": checksum, + "file_size": len(bin_data), + "relative_path": relative_path, + } + + def _inverse_data(self): + for record in self: + record.write(record._prepare_meta_for_file()) + record.backend_id.sudo().add( + record.relative_path, + record.data, + mimetype=record.mimetype, + binary=False, + ) + + def _compute_data(self): + for rec in self: + if self._context.get("bin_size"): + rec.data = rec.file_size + elif rec.relative_path: + rec.data = rec.backend_id.sudo().get(rec.relative_path, binary=False) + else: + 
rec.data = None + + @api.depends("relative_path", "backend_id") + def _compute_url(self): + for record in self: + record.url = record._get_url() + + @api.depends("relative_path", "backend_id") + def _compute_url_path(self): + # Keep this separated from `url` to avoid multiple compute: + # you'll need either one or the other. + for record in self: + record.url_path = record._get_url(exclude_base_url=True) + + def _get_url(self, exclude_base_url=False): + """Retrieve file URL based on backend params. + + :param exclude_base_url: skip base_url + """ + return self.backend_id._get_url_for_file( + self, exclude_base_url=exclude_base_url + ) + + @api.depends("slug") + def _compute_internal_url(self): + for record in self: + record.internal_url = record._get_internal_url() + + def _get_internal_url(self): + """Retrieve file URL to load file directly from the storage. + + It is recommended to use this for Odoo backend internal usage + to not generate traffic on external services. + """ + return f"/storage.file/{self.slug}" + + @api.depends("name") + def _compute_extract_filename(self): + for rec in self: + if rec.name: + rec.filename, rec.extension = os.path.splitext(rec.name) + mime, __ = mimetypes.guess_type(rec.name) + else: + rec.filename = rec.extension = mime = False + rec.mimetype = mime + + def unlink(self): + if self._context.get("cleanning_storage_file"): + super(StorageFile, self).unlink() + else: + self.write({"to_delete": True, "active": False}) + return True + + @api.model + def _clean_storage_file(self): + # we must be sure that all the changes are into the DB since + # we by pass the ORM + self.flush() + self._cr.execute( + """SELECT id + FROM storage_file + WHERE to_delete=True FOR UPDATE""" + ) + ids = [x[0] for x in self._cr.fetchall()] + for st_file in self.browse(ids): + st_file.backend_id.sudo().delete(st_file.relative_path) + st_file.with_context(cleanning_storage_file=True).unlink() + # commit is required since the backend could be an external system 
+ # therefore, if the record is deleted on the external system + # we must be sure that the record is also deleted into Odoo + st_file._cr.commit() + + @api.model + def get_from_slug_name_with_id(self, slug_name_with_id): + """ + Return a browse record from a string generated by the method + _slugify_name_with_id + :param slug_name_with_id: + :return: a BrowseRecord (could be empty...) + """ + # id is the last group of digit after '-' + _id = re.findall(r"-([0-9]+)", slug_name_with_id)[-1:] + if _id: + _id = int(_id[0]) + return self.browse(_id) diff --git a/storage_file/security/ir.model.access.csv b/storage_file/security/ir.model.access.csv new file mode 100644 index 0000000..21c0347 --- /dev/null +++ b/storage_file/security/ir.model.access.csv @@ -0,0 +1,3 @@ +id,name,model_id:id,group_id:id,perm_read,perm_write,perm_create,perm_unlink +access_storage_file_edit,storage_file edit,model_storage_file,base.group_system,1,1,1,1 +access_storage_file_read_public,storage_file public read,model_storage_file,,1,0,0,0 diff --git a/storage_file/security/storage_file.xml b/storage_file/security/storage_file.xml new file mode 100644 index 0000000..2510d3d --- /dev/null +++ b/storage_file/security/storage_file.xml @@ -0,0 +1,16 @@ + + + + + + Storage file public + + + [('backend_id.is_public', '=', True)] + + + + + + diff --git a/storage_file/static/description/icon.png b/storage_file/static/description/icon.png new file mode 100644 index 0000000..3a0328b Binary files /dev/null and b/storage_file/static/description/icon.png differ diff --git a/storage_file/tests/__init__.py b/storage_file/tests/__init__.py new file mode 100644 index 0000000..f2d7ae7 --- /dev/null +++ b/storage_file/tests/__init__.py @@ -0,0 +1 @@ +from . 
import test_storage_file diff --git a/storage_file/tests/test_storage_file.py b/storage_file/tests/test_storage_file.py new file mode 100644 index 0000000..1d72300 --- /dev/null +++ b/storage_file/tests/test_storage_file.py @@ -0,0 +1,311 @@ +# Copyright 2017 Akretion (http://www.akretion.com). +# @author Sébastien BEAU +# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl). + +import base64 +from urllib import parse + +import mock + +from odoo.exceptions import AccessError, UserError + +from odoo.addons.component.tests.common import TransactionComponentCase + + +class StorageFileCase(TransactionComponentCase): + def setUp(self): + super().setUp() + self.backend = self.env.ref("storage_backend.default_storage_backend") + data = b"This is a simple file" + self.filesize = len(data) + self.filedata = base64.b64encode(data) + self.filename = "test of my_file.txt" + + def _create_storage_file(self): + return self.env["storage.file"].create( + { + "name": self.filename, + "backend_id": self.backend.id, + "data": self.filedata, + } + ) + + def test_create_and_read_served_by_odoo(self): + stfile = self._create_storage_file() + self.assertEqual(stfile.data, self.filedata) + self.assertEqual(stfile.mimetype, "text/plain") + self.assertEqual(stfile.extension, ".txt") + self.assertEqual(stfile.filename, "test of my_file") + self.assertEqual(stfile.relative_path, "test-of-my_file-%s.txt" % stfile.id) + url = parse.urlparse(stfile.url) + self.assertEqual(url.path, "/storage.file/test-of-my_file-%s.txt" % stfile.id) + self.assertEqual(stfile.file_size, self.filesize) + + def test_get_from_slug_name_with_id(self): + stfile = self._create_storage_file() + stfile2 = self.env["storage.file"].get_from_slug_name_with_id( + "test-of-my_file-%s.txt" % stfile.id + ) + self.assertEqual(stfile, stfile2) + # the method parse the given string to find the id. 
The id is the + # last sequence of digit starting with '-' + stfile2 = self.env["storage.file"].get_from_slug_name_with_id( + "test-999-%s.txt2" % stfile.id + ) + self.assertEqual(stfile, stfile2) + stfile2 = self.env["storage.file"].get_from_slug_name_with_id( + "test-999-%s" % stfile.id + ) + self.assertEqual(stfile, stfile2) + + def test_slug(self): + stfile = self._create_storage_file() + self.assertEqual( + stfile.slug, + "test-of-my_file-{}.txt".format(stfile.id), + ) + stfile.name = "Name has changed.png" + self.assertEqual( + stfile.slug, + "name-has-changed-{}.png".format(stfile.id), + ) + + def test_internal_url(self): + stfile = self._create_storage_file() + self.assertEqual( + stfile.internal_url, + "/storage.file/test-of-my_file-{}.txt".format(stfile.id), + ) + stfile.name = "Name has changed.png" + self.assertEqual( + stfile.slug, + "name-has-changed-{}.png".format(stfile.id), + ) + self.assertEqual( + stfile.internal_url, + "/storage.file/name-has-changed-{}.png".format(stfile.id), + ) + + def test_url(self): + stfile = self._create_storage_file() + params = self.env["ir.config_parameter"].sudo() + base_url = params.get_param("web.base.url") + # served by odoo + self.assertEqual( + stfile.url, + "{}/storage.file/test-of-my_file-{}.txt".format(base_url, stfile.id), + ) + # served by external + stfile.backend_id.update( + { + "served_by": "external", + "base_url": "https://foo.com", + "directory_path": "baz", + } + ) + # path not included + self.assertEqual( + stfile.url, "https://foo.com/test-of-my_file-{}.txt".format(stfile.id) + ) + # path included + stfile.backend_id.url_include_directory_path = True + self.assertEqual( + stfile.url, "https://foo.com/baz/test-of-my_file-{}.txt".format(stfile.id) + ) + + def test_url_without_base_url(self): + stfile = self._create_storage_file() + # served by odoo + self.assertEqual( + stfile.url_path, + "/storage.file/test-of-my_file-{}.txt".format(stfile.id), + ) + # served by external + stfile.backend_id.update( 
+ { + "served_by": "external", + "base_url": "https://foo.com", + "directory_path": "baz", + } + ) + stfile.invalidate_cache() + # path not included + self.assertEqual( + stfile.with_context(foo=1).url_path, + "/test-of-my_file-{}.txt".format(stfile.id), + ) + # path included + stfile.backend_id.url_include_directory_path = True + stfile.invalidate_cache() + self.assertEqual( + stfile.url_path, + "/baz/test-of-my_file-{}.txt".format(stfile.id), + ) + + def test_url_for_report(self): + stfile = self._create_storage_file() + params = self.env["ir.config_parameter"].sudo() + params.set_param("report.url", "http://report.url") + # served by odoo + self.assertEqual( + stfile.with_context(print_report_pdf=True).url, + "http://report.url/storage.file/test-of-my_file-{}.txt".format(stfile.id), + ) + + def test_create_store_with_hash(self): + self.backend.filename_strategy = "hash" + stfile = self._create_storage_file() + self.assertEqual(stfile.data, self.filedata) + self.assertEqual(stfile.mimetype, "text/plain") + self.assertEqual(stfile.extension, ".txt") + self.assertEqual(stfile.filename, "test of my_file") + self.assertEqual( + stfile.relative_path, "13/1322d9ccb3d257095185b205eadc9307aae5dc84" + ) + + def test_missing_name_strategy(self): + self.backend.filename_strategy = None + with self.assertRaises(UserError): + self._create_storage_file() + + def test_create_and_read_served_by_external(self): + self.backend.write( + {"served_by": "external", "base_url": "https://cdn.example.com"} + ) + stfile = self._create_storage_file() + self.assertEqual(stfile.data, self.filedata) + self.assertEqual( + stfile.url, "https://cdn.example.com/test-of-my_file-%s.txt" % stfile.id + ) + self.assertEqual(stfile.file_size, self.filesize) + + def test_read_bin_size(self): + stfile = self._create_storage_file() + self.assertEqual(stfile.with_context(bin_size=True).data, b"21.00 bytes") + + def test_cannot_update_data(self): + stfile = self._create_storage_file() + data = 
base64.b64encode(b"This is different data") + with self.assertRaises(UserError): + stfile.write({"data": data}) + + # check that the file have been not modified + self.assertEqual(stfile.read()[0]["data"], self.filedata) + + def test_unlink(self): + # Do not commit during the test + self.cr.commit = lambda: True + stfile = self._create_storage_file() + + backend = stfile.backend_id + relative_path = stfile.relative_path + stfile.unlink() + + # Check the the storage file is set to delete + # and the file still exist on the storage + self.assertEqual(stfile.to_delete, True) + self.assertIn(relative_path, backend.list_files()) + + # Run the method to clean the storage.file + self.env["storage.file"]._clean_storage_file() + + # Check that the file is deleted + files = ( + self.env["storage.file"] + .with_context(active_test=False) + .search([("id", "=", stfile.id)]) + ) + self.assertEqual(len(files), 0) + self.assertNotIn(relative_path, backend.list_files()) + + def test_public_access1(self): + """ + Test the public access (when is_public on the backend). + When checked, the public user should have access to every content + (storage.file). + For this case, we use this public user and try to read a field on + no-public storage.file. + An exception should be raised because the backend is not public + :return: bool + """ + storage_file = self._create_storage_file() + # Ensure it's False (we shouldn't specify a is_public = False on the + # storage.backend creation because False must be the default value) + self.assertFalse(storage_file.backend_id.is_public) + # Public user used on the controller when authentication is 'public' + public_user = self.env.ref("base.public_user") + with self.assertRaises(AccessError): + # BUG OR NOT with_user doesn't invalidate the cache... 
+ # force cache invalidation + self.env.cache.invalidate() + self.env[storage_file._name].with_user(public_user).browse( + storage_file.ids + ).name + return True + + def test_public_access2(self): + """ + Test the public access (when is_public on the backend). + When checked, the public user should have access to every content + (storage.file). + For this case, we use this public user and try to read a field on + no-public storage.file. + This public user should have access because the backend is public + :return: bool + """ + storage_file = self._create_storage_file() + storage_file.backend_id.write({"is_public": True}) + self.assertTrue(storage_file.backend_id.is_public) + # Public user used on the controller when authentication is 'public' + public_user = self.env.ref("base.public_user") + env = self.env(user=public_user) + storage_file_public = env[storage_file._name].browse(storage_file.ids) + self.assertTrue(storage_file_public.name) + return True + + def test_public_access3(self): + """ + Test the public access (when is_public on the backend). + When checked, the public user should have access to every content + (storage.file). 
+ For this case, we use the demo user and try to read a field on + no-public storage.file (no exception should be raised) + :return: bool + """ + storage_file = self._create_storage_file() + # Ensure it's False (we shouldn't specify a is_public = False on the + # storage.backend creation because False must be the default value) + self.assertFalse(storage_file.backend_id.is_public) + demo_user = self.env.ref("base.user_demo") + env = self.env(user=demo_user) + storage_file_public = env[storage_file._name].browse(storage_file.ids) + self.assertTrue(storage_file_public.name) + return True + + def test_get_backend_from_param(self): + storage_file = self._create_storage_file() + with mock.patch.object( + type(self.env["ir.config_parameter"]), "get_param" + ) as mocked: + mocked.return_value = str(storage_file.backend_id.id) + self.assertEqual( + self.env["storage.backend"]._get_backend_id_from_param( + self.env, "foo.baz" + ), + storage_file.backend_id.id, + ) + with mock.patch.object( + type(self.env["ir.config_parameter"]), "get_param" + ) as mocked: + mocked.return_value = "storage_backend.default_storage_backend" + self.assertEqual( + self.env["storage.backend"]._get_backend_id_from_param( + self.env, "foo.baz" + ), + storage_file.backend_id.id, + ) + + def test_empty(self): + # get_url is called on new records + empty = self.env["storage.file"].new({})._get_url() + self.assertEqual(empty, "") diff --git a/storage_file/views/storage_backend_view.xml b/storage_file/views/storage_backend_view.xml new file mode 100644 index 0000000..b5ec35f --- /dev/null +++ b/storage_file/views/storage_backend_view.xml @@ -0,0 +1,60 @@ + + + + storage.backend + + + + + + + + + + + + + + +