Merge pull request #707 from taigaio/issue/4114/remove_incomplete_project_if_import_fail
Fix issue #4114: Remove incomplete project if import process fails (now-is-when version)
commit
22342878db
@ -4,6 +4,8 @@

## 2.1.0 ??? (unreleased)

### Features
- Add sprint name and slug on search results for user stories (thanks to [@everblut](https://github.com/everblut))
- [API] projects resource: Random order if `discover_mode=true` and `is_featured=true`.
- Webhooks: Improve webhook data:
    - add permalinks
    - owner, assigned_to, status, type, priority, severity, user_story, milestone and project are objects

@ -15,8 +17,9 @@

- CSV Reports:
    - Change field name: 'milestone' to 'sprint'
    - Add new fields: 'sprint_estimated_start' and 'sprint_estimated_end'
- Add sprint name and slug on search results for user stories (thanks to [@everblut](https://github.com/everblut))
- [API] projects resource: Random order if `discover_mode=true` and `is_featured=true`.
- Importer:
    - Remove the incomplete project if loading a dump file fails (see the sketch below)
    - Add more info to the logger if loading a dump file fails

### Misc
- Lots of small and not so small bugfixes.
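The importer change above is the core of this pull request: when loading a dump fails halfway, the partially created project is removed instead of being left behind. A minimal sketch of the pattern, mirroring the view and management-command code further down in this diff (names such as `dump` and `owner` are illustrative):

    from taiga.export_import import services
    from taiga.export_import import exceptions as err

    try:
        project = services.store_project_from_dict(dump, owner)
    except err.TaigaImportError as e:
        # Remove the incomplete project so no half-imported data is left behind.
        if e.project:
            e.project.delete_related_content()
            e.project.delete()
        raise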
@ -96,3 +96,11 @@ DATABASES = {

# If True, /front/sitemap.xml shows a valid sitemap of the taiga-front client
#FRONT_SITEMAP_ENABLED = False
#FRONT_SITEMAP_CACHE_TIMEOUT = 24*60*60  # In seconds

# CELERY
#from .celery import *
#CELERY_ENABLED = True
#
# To use celery in memory
#CELERY_ENABLED = True
#CELERY_ALWAYS_EAGER = True
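The new CELERY block controls whether a dump import runs asynchronously: with `CELERY_ENABLED = True` the import endpoint queues a task and returns its id, otherwise the import runs in-process. A minimal local-settings sketch based only on the commented defaults above (the file name is illustrative):

    # settings/local.py -- sketch, not a definitive configuration
    from .celery import *

    CELERY_ENABLED = True
    # For tests or single-process setups, tasks can be executed in memory:
    # CELERY_ALWAYS_EAGER = True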
@ -17,10 +17,8 @@

from .development import *

SKIP_SOUTH_TESTS = True
SOUTH_TESTS_MIGRATE = False
CELERY_ALWAYS_EAGER = True
CELERY_ENABLED = False
CELERY_ALWAYS_EAGER = True

MEDIA_ROOT = "/tmp"
@ -36,14 +36,14 @@ from taiga.projects.models import Project, Membership
|
|||
from taiga.projects.issues.models import Issue
|
||||
from taiga.projects.tasks.models import Task
|
||||
from taiga.projects.serializers import ProjectSerializer
|
||||
from taiga.users import services as users_service
|
||||
from taiga.users import services as users_services
|
||||
|
||||
from . import exceptions as err
|
||||
from . import mixins
|
||||
from . import serializers
|
||||
from . import service
|
||||
from . import permissions
|
||||
from . import serializers
|
||||
from . import services
|
||||
from . import tasks
|
||||
from . import dump_service
|
||||
from . import throttling
|
||||
from .renderers import ExportRenderer
|
||||
|
||||
|
@ -72,7 +72,7 @@ class ProjectExporterViewSet(mixins.ImportThrottlingPolicyMixin, GenericViewSet)
|
|||
path = "exports/{}/{}-{}.json".format(project.pk, project.slug, uuid.uuid4().hex)
|
||||
storage_path = default_storage.path(path)
|
||||
with default_storage.open(storage_path, mode="w") as outfile:
|
||||
service.render_project(project, outfile)
|
||||
services.render_project(project, outfile)
|
||||
|
||||
response_data = {
|
||||
"url": default_storage.url(path)
|
||||
|
@ -96,7 +96,7 @@ class ProjectImporterViewSet(mixins.ImportThrottlingPolicyMixin, CreateModelMixi
|
|||
total_memberships = len([m for m in data.get("memberships", [])
|
||||
if m.get("email", None) != data["owner"]])
|
||||
total_memberships = total_memberships + 1 # 1 is the owner
|
||||
(enough_slots, error_message) = users_service.has_available_slot_for_import_new_project(
|
||||
(enough_slots, error_message) = users_services.has_available_slot_for_import_new_project(
|
||||
self.request.user,
|
||||
is_private,
|
||||
total_memberships
|
||||
|
@ -105,22 +105,22 @@ class ProjectImporterViewSet(mixins.ImportThrottlingPolicyMixin, CreateModelMixi
|
|||
raise exc.NotEnoughSlotsForProject(is_private, total_memberships, error_message)
|
||||
|
||||
# Create Project
|
||||
project_serialized = service.store_project(data)
|
||||
project_serialized = services.store.store_project(data)
|
||||
|
||||
if not project_serialized:
|
||||
raise exc.BadRequest(service.get_errors())
|
||||
raise exc.BadRequest(services.store.get_errors())
|
||||
|
||||
# Create roles
|
||||
roles_serialized = None
|
||||
if "roles" in data:
|
||||
roles_serialized = service.store_roles(project_serialized.object, data)
|
||||
roles_serialized = services.store.store_roles(project_serialized.object, data)
|
||||
|
||||
if not roles_serialized:
|
||||
raise exc.BadRequest(_("We needed at least one role"))
|
||||
|
||||
# Create memberships
|
||||
if "memberships" in data:
|
||||
service.store_memberships(project_serialized.object, data)
|
||||
services.store.store_memberships(project_serialized.object, data)
|
||||
|
||||
try:
|
||||
owner_membership = project_serialized.object.memberships.get(user=project_serialized.object.owner)
|
||||
|
@ -137,57 +137,57 @@ class ProjectImporterViewSet(mixins.ImportThrottlingPolicyMixin, CreateModelMixi
|
|||
|
||||
# Create project values choices
|
||||
if "points" in data:
|
||||
service.store_choices(project_serialized.object, data,
|
||||
"points", serializers.PointsExportSerializer)
|
||||
services.store.store_project_attributes_values(project_serialized.object, data,
|
||||
"points", serializers.PointsExportSerializer)
|
||||
if "issue_types" in data:
|
||||
service.store_choices(project_serialized.object, data,
|
||||
"issue_types",
|
||||
serializers.IssueTypeExportSerializer)
|
||||
services.store.store_project_attributes_values(project_serialized.object, data,
|
||||
"issue_types",
|
||||
serializers.IssueTypeExportSerializer)
|
||||
if "issue_statuses" in data:
|
||||
service.store_choices(project_serialized.object, data,
|
||||
"issue_statuses",
|
||||
serializers.IssueStatusExportSerializer,)
|
||||
services.store.store_project_attributes_values(project_serialized.object, data,
|
||||
"issue_statuses",
|
||||
serializers.IssueStatusExportSerializer,)
|
||||
if "us_statuses" in data:
|
||||
service.store_choices(project_serialized.object, data,
|
||||
"us_statuses",
|
||||
serializers.UserStoryStatusExportSerializer,)
|
||||
services.store.store_project_attributes_values(project_serialized.object, data,
|
||||
"us_statuses",
|
||||
serializers.UserStoryStatusExportSerializer,)
|
||||
if "task_statuses" in data:
|
||||
service.store_choices(project_serialized.object, data,
|
||||
"task_statuses",
|
||||
serializers.TaskStatusExportSerializer)
|
||||
services.store.store_project_attributes_values(project_serialized.object, data,
|
||||
"task_statuses",
|
||||
serializers.TaskStatusExportSerializer)
|
||||
if "priorities" in data:
|
||||
service.store_choices(project_serialized.object, data,
|
||||
"priorities",
|
||||
serializers.PriorityExportSerializer)
|
||||
services.store.store_project_attributes_values(project_serialized.object, data,
|
||||
"priorities",
|
||||
serializers.PriorityExportSerializer)
|
||||
if "severities" in data:
|
||||
service.store_choices(project_serialized.object, data,
|
||||
"severities",
|
||||
serializers.SeverityExportSerializer)
|
||||
services.store.store_project_attributes_values(project_serialized.object, data,
|
||||
"severities",
|
||||
serializers.SeverityExportSerializer)
|
||||
|
||||
if ("points" in data or "issues_types" in data or
|
||||
"issues_statuses" in data or "us_statuses" in data or
|
||||
"task_statuses" in data or "priorities" in data or
|
||||
"severities" in data):
|
||||
service.store_default_choices(project_serialized.object, data)
|
||||
services.store.store_default_project_attributes_values(project_serialized.object, data)
|
||||
|
||||
# Created custom attributes
|
||||
if "userstorycustomattributes" in data:
|
||||
service.store_custom_attributes(project_serialized.object, data,
|
||||
"userstorycustomattributes",
|
||||
serializers.UserStoryCustomAttributeExportSerializer)
|
||||
services.store.store_custom_attributes(project_serialized.object, data,
|
||||
"userstorycustomattributes",
|
||||
serializers.UserStoryCustomAttributeExportSerializer)
|
||||
|
||||
if "taskcustomattributes" in data:
|
||||
service.store_custom_attributes(project_serialized.object, data,
|
||||
"taskcustomattributes",
|
||||
serializers.TaskCustomAttributeExportSerializer)
|
||||
services.store.store_custom_attributes(project_serialized.object, data,
|
||||
"taskcustomattributes",
|
||||
serializers.TaskCustomAttributeExportSerializer)
|
||||
|
||||
if "issuecustomattributes" in data:
|
||||
service.store_custom_attributes(project_serialized.object, data,
|
||||
"issuecustomattributes",
|
||||
serializers.IssueCustomAttributeExportSerializer)
|
||||
services.store.store_custom_attributes(project_serialized.object, data,
|
||||
"issuecustomattributes",
|
||||
serializers.IssueCustomAttributeExportSerializer)
|
||||
|
||||
# Is there any error?
|
||||
errors = service.get_errors()
|
||||
errors = services.store.get_errors()
|
||||
if errors:
|
||||
raise exc.BadRequest(errors)
|
||||
|
||||
|
@ -199,21 +199,33 @@ class ProjectImporterViewSet(mixins.ImportThrottlingPolicyMixin, CreateModelMixi
|
|||
|
||||
@detail_route(methods=['post'])
|
||||
@method_decorator(atomic)
|
||||
def issue(self, request, *args, **kwargs):
|
||||
def milestone(self, request, *args, **kwargs):
|
||||
project = self.get_object_or_none()
|
||||
self.check_permissions(request, 'import_item', project)
|
||||
|
||||
signals.pre_save.disconnect(sender=Issue,
|
||||
dispatch_uid="set_finished_date_when_edit_issue")
|
||||
milestone = services.store.store_milestone(project, request.DATA.copy())
|
||||
|
||||
issue = service.store_issue(project, request.DATA.copy())
|
||||
|
||||
errors = service.get_errors()
|
||||
errors = services.store.get_errors()
|
||||
if errors:
|
||||
raise exc.BadRequest(errors)
|
||||
|
||||
headers = self.get_success_headers(issue.data)
|
||||
return response.Created(issue.data, headers=headers)
|
||||
headers = self.get_success_headers(milestone.data)
|
||||
return response.Created(milestone.data, headers=headers)
|
||||
|
||||
@detail_route(methods=['post'])
|
||||
@method_decorator(atomic)
|
||||
def us(self, request, *args, **kwargs):
|
||||
project = self.get_object_or_none()
|
||||
self.check_permissions(request, 'import_item', project)
|
||||
|
||||
us = services.store.store_user_story(project, request.DATA.copy())
|
||||
|
||||
errors = services.store.get_errors()
|
||||
if errors:
|
||||
raise exc.BadRequest(errors)
|
||||
|
||||
headers = self.get_success_headers(us.data)
|
||||
return response.Created(us.data, headers=headers)
|
||||
|
||||
@detail_route(methods=['post'])
|
||||
@method_decorator(atomic)
|
||||
|
@ -224,9 +236,9 @@ class ProjectImporterViewSet(mixins.ImportThrottlingPolicyMixin, CreateModelMixi
|
|||
signals.pre_save.disconnect(sender=Task,
|
||||
dispatch_uid="set_finished_date_when_edit_task")
|
||||
|
||||
task = service.store_task(project, request.DATA.copy())
|
||||
task = services.store.store_task(project, request.DATA.copy())
|
||||
|
||||
errors = service.get_errors()
|
||||
errors = services.store.get_errors()
|
||||
if errors:
|
||||
raise exc.BadRequest(errors)
|
||||
|
||||
|
@ -235,33 +247,21 @@ class ProjectImporterViewSet(mixins.ImportThrottlingPolicyMixin, CreateModelMixi
|
|||
|
||||
@detail_route(methods=['post'])
|
||||
@method_decorator(atomic)
|
||||
def us(self, request, *args, **kwargs):
|
||||
def issue(self, request, *args, **kwargs):
|
||||
project = self.get_object_or_none()
|
||||
self.check_permissions(request, 'import_item', project)
|
||||
|
||||
us = service.store_user_story(project, request.DATA.copy())
|
||||
signals.pre_save.disconnect(sender=Issue,
|
||||
dispatch_uid="set_finished_date_when_edit_issue")
|
||||
|
||||
errors = service.get_errors()
|
||||
issue = services.store.store_issue(project, request.DATA.copy())
|
||||
|
||||
errors = services.store.get_errors()
|
||||
if errors:
|
||||
raise exc.BadRequest(errors)
|
||||
|
||||
headers = self.get_success_headers(us.data)
|
||||
return response.Created(us.data, headers=headers)
|
||||
|
||||
@detail_route(methods=['post'])
|
||||
@method_decorator(atomic)
|
||||
def milestone(self, request, *args, **kwargs):
|
||||
project = self.get_object_or_none()
|
||||
self.check_permissions(request, 'import_item', project)
|
||||
|
||||
milestone = service.store_milestone(project, request.DATA.copy())
|
||||
|
||||
errors = service.get_errors()
|
||||
if errors:
|
||||
raise exc.BadRequest(errors)
|
||||
|
||||
headers = self.get_success_headers(milestone.data)
|
||||
return response.Created(milestone.data, headers=headers)
|
||||
headers = self.get_success_headers(issue.data)
|
||||
return response.Created(issue.data, headers=headers)
|
||||
|
||||
@detail_route(methods=['post'])
|
||||
@method_decorator(atomic)
|
||||
|
@ -269,9 +269,9 @@ class ProjectImporterViewSet(mixins.ImportThrottlingPolicyMixin, CreateModelMixi
|
|||
project = self.get_object_or_none()
|
||||
self.check_permissions(request, 'import_item', project)
|
||||
|
||||
wiki_page = service.store_wiki_page(project, request.DATA.copy())
|
||||
wiki_page = services.store.store_wiki_page(project, request.DATA.copy())
|
||||
|
||||
errors = service.get_errors()
|
||||
errors = services.store.get_errors()
|
||||
if errors:
|
||||
raise exc.BadRequest(errors)
|
||||
|
||||
|
@ -284,9 +284,9 @@ class ProjectImporterViewSet(mixins.ImportThrottlingPolicyMixin, CreateModelMixi
|
|||
project = self.get_object_or_none()
|
||||
self.check_permissions(request, 'import_item', project)
|
||||
|
||||
wiki_link = service.store_wiki_link(project, request.DATA.copy())
|
||||
wiki_link = services.store.store_wiki_link(project, request.DATA.copy())
|
||||
|
||||
errors = service.get_errors()
|
||||
errors = services.store.get_errors()
|
||||
if errors:
|
||||
raise exc.BadRequest(errors)
|
||||
|
||||
|
@ -327,7 +327,7 @@ class ProjectImporterViewSet(mixins.ImportThrottlingPolicyMixin, CreateModelMixi
|
|||
total_memberships = len([m for m in dump.get("memberships", [])
|
||||
if m.get("email", None) != dump["owner"]])
|
||||
total_memberships = total_memberships + 1 # 1 is the owner
|
||||
(enough_slots, error_message) = users_service.has_available_slot_for_import_new_project(
|
||||
(enough_slots, error_message) = users_services.has_available_slot_for_import_new_project(
|
||||
user,
|
||||
is_private,
|
||||
total_memberships
|
||||
|
@ -335,11 +335,23 @@ class ProjectImporterViewSet(mixins.ImportThrottlingPolicyMixin, CreateModelMixi
|
|||
if not enough_slots:
|
||||
raise exc.NotEnoughSlotsForProject(is_private, total_memberships, error_message)
|
||||
|
||||
# Async mode
|
||||
if settings.CELERY_ENABLED:
|
||||
task = tasks.load_project_dump.delay(user, dump)
|
||||
return response.Accepted({"import_id": task.id})
|
||||
|
||||
project = dump_service.dict_to_project(dump, request.user)
|
||||
response_data = ProjectSerializer(project).data
|
||||
return response.Created(response_data)
|
||||
# Sync mode
|
||||
try:
|
||||
project = services.store_project_from_dict(dump, request.user)
|
||||
except err.TaigaImportError as e:
|
||||
# On Error
|
||||
## remove project
|
||||
if e.project:
|
||||
e.project.delete_related_content()
|
||||
e.project.delete()
|
||||
|
||||
return response.BadRequest({"error": e.message, "details": e.errors})
|
||||
else:
|
||||
# On Success
|
||||
response_data = ProjectSerializer(project).data
|
||||
return response.Created(response_data)
|
||||
|
|
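Seen from an API client, the load_dump endpoint above now has three outcomes; a sketch of the response shapes, taken from the code in this hunk (values are illustrative):

    # Async mode (CELERY_ENABLED): 202 Accepted
    #   {"import_id": "<celery task id>"}
    # Sync mode, success: 201 Created with the serialized project.
    # Sync mode, failure: 400 Bad Request, and the incomplete project has already
    # been deleted:
    #   {"error": "<error message>", "details": <errors attached to the exception>}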
|
@ -1,202 +0,0 @@
|
|||
# Copyright (C) 2014-2016 Andrey Antukh <niwi@niwi.nz>
|
||||
# Copyright (C) 2014-2016 Jesús Espino <jespinog@gmail.com>
|
||||
# Copyright (C) 2014-2016 David Barragán <bameda@dbarragan.com>
|
||||
# Copyright (C) 2014-2016 Alejandro Alonso <alejandro.alonso@kaleidos.net>
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as
|
||||
# published by the Free Software Foundation, either version 3 of the
|
||||
# License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from django.utils.decorators import method_decorator
|
||||
from django.utils.translation import ugettext as _
|
||||
|
||||
from taiga.projects.models import Membership, Project
|
||||
from taiga.users import services as users_service
|
||||
|
||||
from . import serializers
|
||||
from . import service
|
||||
|
||||
|
||||
class TaigaImportError(Exception):
|
||||
def __init__(self, message):
|
||||
self.message = message
|
||||
|
||||
|
||||
def store_milestones(project, data):
|
||||
results = []
|
||||
for milestone_data in data.get("milestones", []):
|
||||
milestone = service.store_milestone(project, milestone_data)
|
||||
results.append(milestone)
|
||||
return results
|
||||
|
||||
|
||||
def store_tasks(project, data):
|
||||
results = []
|
||||
for task in data.get("tasks", []):
|
||||
task = service.store_task(project, task)
|
||||
results.append(task)
|
||||
return results
|
||||
|
||||
|
||||
def store_wiki_pages(project, data):
|
||||
results = []
|
||||
for wiki_page in data.get("wiki_pages", []):
|
||||
results.append(service.store_wiki_page(project, wiki_page))
|
||||
return results
|
||||
|
||||
|
||||
def store_wiki_links(project, data):
|
||||
results = []
|
||||
for wiki_link in data.get("wiki_links", []):
|
||||
results.append(service.store_wiki_link(project, wiki_link))
|
||||
return results
|
||||
|
||||
|
||||
def store_user_stories(project, data):
|
||||
results = []
|
||||
for userstory in data.get("user_stories", []):
|
||||
us = service.store_user_story(project, userstory)
|
||||
results.append(us)
|
||||
return results
|
||||
|
||||
|
||||
def store_timeline_entries(project, data):
|
||||
results = []
|
||||
for timeline in data.get("timeline", []):
|
||||
tl = service.store_timeline_entry(project, timeline)
|
||||
results.append(tl)
|
||||
return results
|
||||
|
||||
|
||||
def store_issues(project, data):
|
||||
issues = []
|
||||
for issue in data.get("issues", []):
|
||||
issues.append(service.store_issue(project, issue))
|
||||
return issues
|
||||
|
||||
|
||||
def store_tags_colors(project, data):
|
||||
project.tags_colors = data.get("tags_colors", [])
|
||||
project.save()
|
||||
return None
|
||||
|
||||
|
||||
def dict_to_project(data, owner=None):
|
||||
if owner:
|
||||
data["owner"] = owner.email
|
||||
|
||||
# Validate if the owner can have this project
|
||||
is_private = data.get("is_private", False)
|
||||
total_memberships = len([m for m in data.get("memberships", [])
|
||||
if m.get("email", None) != data["owner"]])
|
||||
total_memberships = total_memberships + 1 # 1 is the owner
|
||||
(enough_slots, error_message) = users_service.has_available_slot_for_import_new_project(
|
||||
owner,
|
||||
is_private,
|
||||
total_memberships
|
||||
)
|
||||
if not enough_slots:
|
||||
raise TaigaImportError(error_message)
|
||||
|
||||
project_serialized = service.store_project(data)
|
||||
|
||||
if not project_serialized:
|
||||
raise TaigaImportError(_("error importing project data"))
|
||||
|
||||
proj = project_serialized.object
|
||||
|
||||
service.store_choices(proj, data, "points", serializers.PointsExportSerializer)
|
||||
service.store_choices(proj, data, "issue_types", serializers.IssueTypeExportSerializer)
|
||||
service.store_choices(proj, data, "issue_statuses", serializers.IssueStatusExportSerializer)
|
||||
service.store_choices(proj, data, "us_statuses", serializers.UserStoryStatusExportSerializer)
|
||||
service.store_choices(proj, data, "task_statuses", serializers.TaskStatusExportSerializer)
|
||||
service.store_choices(proj, data, "priorities", serializers.PriorityExportSerializer)
|
||||
service.store_choices(proj, data, "severities", serializers.SeverityExportSerializer)
|
||||
|
||||
if service.get_errors(clear=False):
|
||||
raise TaigaImportError(_("error importing lists of project attributes"))
|
||||
|
||||
service.store_default_choices(proj, data)
|
||||
|
||||
if service.get_errors(clear=False):
|
||||
raise TaigaImportError(_("error importing default project attributes values"))
|
||||
|
||||
service.store_custom_attributes(proj, data, "userstorycustomattributes",
|
||||
serializers.UserStoryCustomAttributeExportSerializer)
|
||||
service.store_custom_attributes(proj, data, "taskcustomattributes",
|
||||
serializers.TaskCustomAttributeExportSerializer)
|
||||
service.store_custom_attributes(proj, data, "issuecustomattributes",
|
||||
serializers.IssueCustomAttributeExportSerializer)
|
||||
|
||||
if service.get_errors(clear=False):
|
||||
raise TaigaImportError(_("error importing custom attributes"))
|
||||
|
||||
service.store_roles(proj, data)
|
||||
|
||||
if service.get_errors(clear=False):
|
||||
raise TaigaImportError(_("error importing roles"))
|
||||
|
||||
service.store_memberships(proj, data)
|
||||
|
||||
if proj.memberships.filter(user=proj.owner).count() == 0:
|
||||
if proj.roles.all().count() > 0:
|
||||
Membership.objects.create(
|
||||
project=proj,
|
||||
email=proj.owner.email,
|
||||
user=proj.owner,
|
||||
role=proj.roles.all().first(),
|
||||
is_admin=True
|
||||
)
|
||||
|
||||
if service.get_errors(clear=False):
|
||||
raise TaigaImportError(_("error importing memberships"))
|
||||
|
||||
store_milestones(proj, data)
|
||||
|
||||
if service.get_errors(clear=False):
|
||||
raise TaigaImportError(_("error importing sprints"))
|
||||
|
||||
store_wiki_pages(proj, data)
|
||||
|
||||
if service.get_errors(clear=False):
|
||||
raise TaigaImportError(_("error importing wiki pages"))
|
||||
|
||||
store_wiki_links(proj, data)
|
||||
|
||||
if service.get_errors(clear=False):
|
||||
raise TaigaImportError(_("error importing wiki links"))
|
||||
|
||||
store_issues(proj, data)
|
||||
|
||||
if service.get_errors(clear=False):
|
||||
raise TaigaImportError(_("error importing issues"))
|
||||
|
||||
store_user_stories(proj, data)
|
||||
|
||||
if service.get_errors(clear=False):
|
||||
raise TaigaImportError(_("error importing user stories"))
|
||||
|
||||
store_tasks(proj, data)
|
||||
|
||||
if service.get_errors(clear=False):
|
||||
raise TaigaImportError(_("error importing tasks"))
|
||||
|
||||
store_tags_colors(proj, data)
|
||||
|
||||
if service.get_errors(clear=False):
|
||||
raise TaigaImportError(_("error importing tags"))
|
||||
|
||||
store_timeline_entries(proj, data)
|
||||
if service.get_errors(clear=False):
|
||||
raise TaigaImportError(_("error importing timelines"))
|
||||
|
||||
proj.refresh_totals()
|
||||
return proj
|
|
@ -0,0 +1,23 @@
|
|||
# Copyright (C) 2014-2016 Andrey Antukh <niwi@niwi.nz>
|
||||
# Copyright (C) 2014-2016 Jesús Espino <jespinog@gmail.com>
|
||||
# Copyright (C) 2014-2016 David Barragán <bameda@dbarragan.com>
|
||||
# Copyright (C) 2014-2016 Alejandro Alonso <alejandro.alonso@kaleidos.net>
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as
|
||||
# published by the Free Software Foundation, either version 3 of the
|
||||
# License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
|
||||
class TaigaImportError(Exception):
|
||||
def __init__(self, message, project, errors=None):
|
||||
self.message = message
|
||||
self.project = project
|
||||
self.errors = errors or []
|
|
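The exception now carries the partially created project and the collected errors, so callers can both clean up and report details. A short sketch of the producer side, assuming the per-section error log kept by the store module elsewhere in this diff (the specific message and project variable are illustrative):

    from django.utils.translation import ugettext as _

    from taiga.export_import import exceptions as err
    from taiga.export_import import services

    # Attach the half-built project so the caller can delete it on failure,
    # plus the error log gathered while storing the dump.
    raise err.TaigaImportError(_("error importing issues"), project,
                               errors=services.store.get_errors())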
@ -18,25 +18,45 @@
|
|||
from django.core.management.base import BaseCommand, CommandError
|
||||
|
||||
from taiga.projects.models import Project
|
||||
from taiga.export_import.renderers import ExportRenderer
|
||||
from taiga.export_import.service import render_project
|
||||
from taiga.export_import.services import render_project
|
||||
|
||||
|
||||
import resource
|
||||
import os
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
args = '<project_slug project_slug ...>'
|
||||
help = 'Export a project to json'
|
||||
renderer_context = {"indent": 4}
|
||||
renderer = ExportRenderer()
|
||||
help = "Export projects to json"
|
||||
|
||||
def add_arguments(self, parser):
|
||||
parser.add_argument("project_slugs",
|
||||
nargs="+",
|
||||
help="<project_slug project_slug ...>")
|
||||
|
||||
parser.add_argument("-d", "--dst_dir",
|
||||
action="store",
|
||||
dest="dst_dir",
|
||||
default="./",
|
||||
metavar="DIR",
|
||||
help="Directory to save the json files. ('./' by default)")
|
||||
|
||||
def handle(self, *args, **options):
|
||||
for project_slug in args:
|
||||
dst_dir = options["dst_dir"]
|
||||
|
||||
if not os.path.exists(dst_dir):
|
||||
raise CommandError("Directory {} does not exist.".format(dst_dir))
|
||||
|
||||
if not os.path.isdir(dst_dir):
|
||||
raise CommandError("'{}' must be a directory, not a file.".format(dst_dir))
|
||||
|
||||
project_slugs = options["project_slugs"]
|
||||
|
||||
for project_slug in project_slugs:
|
||||
try:
|
||||
project = Project.objects.get(slug=project_slug)
|
||||
except Project.DoesNotExist:
|
||||
raise CommandError('Project "%s" does not exist' % project_slug)
|
||||
raise CommandError("Project '{}' does not exist".format(project_slug))
|
||||
|
||||
with open('%s.json'%(project_slug), 'w') as outfile:
|
||||
render_project(project, outfile)
|
||||
dst_file = os.path.join(dst_dir, "{}.json".format(project_slug))
|
||||
with open(dst_file, "w") as f:
|
||||
render_project(project, f)
|
||||
|
||||
print("-> Generate dump of project '{}' in '{}'".format(project.name, src_file))
|
||||
|
|
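Example of driving the rewritten command programmatically, assuming an existing project and destination directory (slug and path are made up for the example):

    from django.core.management import call_command

    # Writes ./exports/my-project.json; fails early if the directory is missing
    # or is not a directory, as the checks above show.
    call_command("dump_project", "my-project", dst_dir="./exports")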
|
@ -21,10 +21,10 @@ from django.db.models import signals
|
|||
from optparse import make_option
|
||||
|
||||
from taiga.base.utils import json
|
||||
from taiga.projects.models import Project
|
||||
from taiga.export_import import services
|
||||
from taiga.export_import import exceptions as err
|
||||
from taiga.export_import.renderers import ExportRenderer
|
||||
from taiga.export_import.dump_service import dict_to_project, TaigaImportError
|
||||
from taiga.export_import.service import get_errors
|
||||
from taiga.projects.models import Project
|
||||
from taiga.users.models import User
|
||||
|
||||
|
||||
|
@ -61,8 +61,12 @@ class Command(BaseCommand):
|
|||
signals.post_delete.receivers = receivers_back
|
||||
|
||||
user = User.objects.get(email=args[1])
|
||||
dict_to_project(data, user)
|
||||
except TaigaImportError as e:
|
||||
services.store_project_from_dict(data, user)
|
||||
except err.TaigaImportError as e:
|
||||
if e.project:
|
||||
e.project.delete_related_content()
|
||||
e.project.delete()
|
||||
|
||||
print("ERROR:", end=" ")
|
||||
print(e.message)
|
||||
print(get_errors())
|
||||
print(services.store.get_errors())
|
||||
|
|
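The load_dump command can be exercised the same way; a sketch assuming it still takes the dump file path and the owner's email positionally, as the `args[1]` lookup above suggests (both values are illustrative):

    from django.core.management import call_command

    # On err.TaigaImportError the command deletes the partially imported project
    # and prints the message plus services.store.get_errors().
    call_command("load_dump", "/tmp/project-dump.json", "owner@example.com")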
|
@ -0,0 +1,26 @@
|
|||
# Copyright (C) 2014-2016 Andrey Antukh <niwi@niwi.nz>
|
||||
# Copyright (C) 2014-2016 Jesús Espino <jespinog@gmail.com>
|
||||
# Copyright (C) 2014-2016 David Barragán <bameda@dbarragan.com>
|
||||
# Copyright (C) 2014-2016 Alejandro Alonso <alejandro.alonso@kaleidos.net>
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as
|
||||
# published by the Free Software Foundation, either version 3 of the
|
||||
# License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
# This makes all code that imports services keep working and
# is not the worst practice ;)
|
||||
|
||||
from .render import render_project
|
||||
from . import render
|
||||
|
||||
from .store import store_project_from_dict
|
||||
from . import store
|
||||
|
|
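The new package keeps both calling styles working: the module-level helpers stay importable while the submodules are also exposed. A short usage sketch matching how the rest of this diff consumes them:

    from taiga.export_import import services

    # Re-exported helpers (backwards compatible):
    #   services.render_project(project, outfile)
    #   services.store_project_from_dict(dump, owner)
    # Namespaced access used by the updated api.py and load_dump command:
    #   services.store.get_errors()
    #   services.render.render_project(project, outfile)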
@ -0,0 +1,124 @@
|
|||
# Copyright (C) 2014-2016 Andrey Antukh <niwi@niwi.nz>
|
||||
# Copyright (C) 2014-2016 Jesús Espino <jespinog@gmail.com>
|
||||
# Copyright (C) 2014-2016 David Barragán <bameda@dbarragan.com>
|
||||
# Copyright (C) 2014-2016 Alejandro Alonso <alejandro.alonso@kaleidos.net>
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as
|
||||
# published by the Free Software Foundation, either version 3 of the
|
||||
# License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
# This makes all code that imports services keep working and
# is not the worst practice ;)
|
||||
|
||||
import base64
|
||||
import gc
|
||||
import os
|
||||
|
||||
from django.core.files.storage import default_storage
|
||||
|
||||
from taiga.base.utils import json
|
||||
from taiga.timeline.service import get_project_timeline
|
||||
from taiga.base.api.fields import get_component
|
||||
|
||||
from .. import serializers
|
||||
|
||||
|
||||
def render_project(project, outfile, chunk_size=8190):
|
||||
serializer = serializers.ProjectExportSerializer(project)
|
||||
outfile.write('{\n')
|
||||
|
||||
first_field = True
|
||||
for field_name in serializer.fields.keys():
|
||||
# Avoid writing "," in the last element
|
||||
if not first_field:
|
||||
outfile.write(",\n")
|
||||
else:
|
||||
first_field = False
|
||||
|
||||
field = serializer.fields.get(field_name)
|
||||
field.initialize(parent=serializer, field_name=field_name)
|
||||
|
||||
# These four "special" fields hava attachments so we use them in a special way
|
||||
if field_name in ["wiki_pages", "user_stories", "tasks", "issues"]:
|
||||
value = get_component(project, field_name)
|
||||
outfile.write('"{}": [\n'.format(field_name))
|
||||
|
||||
attachments_field = field.fields.pop("attachments", None)
|
||||
if attachments_field:
|
||||
attachments_field.initialize(parent=field, field_name="attachments")
|
||||
|
||||
first_item = True
|
||||
for item in value.iterator():
|
||||
# Avoid writing "," in the last element
|
||||
if not first_item:
|
||||
outfile.write(",\n")
|
||||
else:
|
||||
first_item = False
|
||||
|
||||
|
||||
dumped_value = json.dumps(field.to_native(item))
|
||||
writing_value = dumped_value[:-1]+ ',\n "attachments": [\n'
|
||||
outfile.write(writing_value)
|
||||
|
||||
first_attachment = True
|
||||
for attachment in item.attachments.iterator():
|
||||
# Avoid writing "," in the last element
|
||||
if not first_attachment:
|
||||
outfile.write(",\n")
|
||||
else:
|
||||
first_attachment = False
|
||||
|
||||
# Write all the data except the serialized file
|
||||
attachment_serializer = serializers.AttachmentExportSerializer(instance=attachment)
|
||||
attached_file_serializer = attachment_serializer.fields.pop("attached_file")
|
||||
dumped_value = json.dumps(attachment_serializer.data)
|
||||
dumped_value = dumped_value[:-1] + ',\n "attached_file":{\n "data":"'
|
||||
outfile.write(dumped_value)
|
||||
|
||||
# We write the attached_files by chunks so the memory used is not increased
|
||||
attachment_file = attachment.attached_file
|
||||
if default_storage.exists(attachment_file.name):
|
||||
with default_storage.open(attachment_file.name) as f:
|
||||
while True:
|
||||
bin_data = f.read(chunk_size)
|
||||
if not bin_data:
|
||||
break
|
||||
|
||||
b64_data = base64.b64encode(bin_data).decode('utf-8')
|
||||
outfile.write(b64_data)
|
||||
|
||||
outfile.write('", \n "name":"{}"}}\n}}'.format(
|
||||
os.path.basename(attachment_file.name)))
|
||||
|
||||
outfile.write(']}')
|
||||
outfile.flush()
|
||||
gc.collect()
|
||||
outfile.write(']')
|
||||
|
||||
else:
|
||||
value = field.field_to_native(project, field_name)
|
||||
outfile.write('"{}": {}'.format(field_name, json.dumps(value)))
|
||||
|
||||
# Generate the timeline
|
||||
outfile.write(',\n"timeline": [\n')
|
||||
first_timeline = True
|
||||
for timeline_item in get_project_timeline(project).iterator():
|
||||
# Avoid writing "," in the last element
|
||||
if not first_timeline:
|
||||
outfile.write(",\n")
|
||||
else:
|
||||
first_timeline = False
|
||||
|
||||
dumped_value = json.dumps(serializers.TimelineExportSerializer(timeline_item).data)
|
||||
outfile.write(dumped_value)
|
||||
|
||||
outfile.write(']}\n')
|
||||
|
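A minimal usage sketch for the streaming exporter above, assuming a writable text file (slug and path are illustrative):

    from taiga.projects.models import Project
    from taiga.export_import import services

    project = Project.objects.get(slug="my-project")
    with open("/tmp/my-project.json", "w") as outfile:
        # Attachments are read and base64-encoded in chunks, so memory use stays
        # bounded even for projects with large attached files.
        services.render_project(project, outfile)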
|
@ -15,30 +15,35 @@
|
|||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import base64
|
||||
import gc
|
||||
import resource
|
||||
# This makes all code that imports services keep working and
# is not the worst practice ;)
|
||||
|
||||
import os
|
||||
import os.path as path
|
||||
import uuid
|
||||
|
||||
from unidecode import unidecode
|
||||
|
||||
from django.template.defaultfilters import slugify
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django.core.exceptions import ObjectDoesNotExist
|
||||
from django.core.files.storage import default_storage
|
||||
from django.template.defaultfilters import slugify
|
||||
from django.utils.translation import ugettext as _
|
||||
|
||||
from taiga.base.utils import json
|
||||
from taiga.projects.history.services import make_key_from_model_object, take_snapshot
|
||||
from taiga.timeline.service import build_project_namespace, get_project_timeline
|
||||
from taiga.projects.models import Membership
|
||||
from taiga.projects.references import sequences as seq
|
||||
from taiga.projects.references import models as refs
|
||||
from taiga.projects.userstories.models import RolePoints
|
||||
from taiga.projects.services import find_invited_user
|
||||
from taiga.base.api.fields import get_component
|
||||
from taiga.timeline.service import build_project_namespace
|
||||
from taiga.users import services as users_service
|
||||
|
||||
from . import serializers
|
||||
from .. import exceptions as err
|
||||
from .. import serializers
|
||||
|
||||
|
||||
########################################################################
|
||||
## Manage errors
|
||||
########################################################################
|
||||
|
||||
_errors_log = {}
|
||||
|
||||
|
@ -57,97 +62,16 @@ def add_errors(section, errors):
|
|||
_errors_log[section] = [errors]
|
||||
|
||||
|
||||
def render_project(project, outfile, chunk_size = 8190):
|
||||
serializer = serializers.ProjectExportSerializer(project)
|
||||
outfile.write('{\n')
|
||||
|
||||
first_field = True
|
||||
for field_name in serializer.fields.keys():
|
||||
# Avoid writing "," in the last element
|
||||
if not first_field:
|
||||
outfile.write(",\n")
|
||||
else:
|
||||
first_field = False
|
||||
|
||||
field = serializer.fields.get(field_name)
|
||||
field.initialize(parent=serializer, field_name=field_name)
|
||||
|
||||
# These four "special" fields hava attachments so we use them in a special way
|
||||
if field_name in ["wiki_pages", "user_stories", "tasks", "issues"]:
|
||||
value = get_component(project, field_name)
|
||||
outfile.write('"{}": [\n'.format(field_name))
|
||||
|
||||
attachments_field = field.fields.pop("attachments", None)
|
||||
if attachments_field:
|
||||
attachments_field.initialize(parent=field, field_name="attachments")
|
||||
|
||||
first_item = True
|
||||
for item in value.iterator():
|
||||
# Avoid writing "," in the last element
|
||||
if not first_item:
|
||||
outfile.write(",\n")
|
||||
else:
|
||||
first_item = False
|
||||
def reset_errors():
|
||||
_errors_log.clear()
|
||||
|
||||
|
||||
dumped_value = json.dumps(field.to_native(item))
|
||||
writing_value = dumped_value[:-1]+ ',\n "attachments": [\n'
|
||||
outfile.write(writing_value)
|
||||
########################################################################
|
||||
## Store functions
|
||||
########################################################################
|
||||
|
||||
first_attachment = True
|
||||
for attachment in item.attachments.iterator():
|
||||
# Avoid writing "," in the last element
|
||||
if not first_attachment:
|
||||
outfile.write(",\n")
|
||||
else:
|
||||
first_attachment = False
|
||||
|
||||
# Write all the data except the serialized file
|
||||
attachment_serializer = serializers.AttachmentExportSerializer(instance=attachment)
|
||||
attached_file_serializer = attachment_serializer.fields.pop("attached_file")
|
||||
dumped_value = json.dumps(attachment_serializer.data)
|
||||
dumped_value = dumped_value[:-1] + ',\n "attached_file":{\n "data":"'
|
||||
outfile.write(dumped_value)
|
||||
|
||||
# We write the attached_files by chunks so the memory used is not increased
|
||||
attachment_file = attachment.attached_file
|
||||
if default_storage.exists(attachment_file.name):
|
||||
with default_storage.open(attachment_file.name) as f:
|
||||
while True:
|
||||
bin_data = f.read(chunk_size)
|
||||
if not bin_data:
|
||||
break
|
||||
|
||||
b64_data = base64.b64encode(bin_data).decode('utf-8')
|
||||
outfile.write(b64_data)
|
||||
|
||||
outfile.write('", \n "name":"{}"}}\n}}'.format(
|
||||
os.path.basename(attachment_file.name)))
|
||||
|
||||
outfile.write(']}')
|
||||
outfile.flush()
|
||||
gc.collect()
|
||||
outfile.write(']')
|
||||
|
||||
else:
|
||||
value = field.field_to_native(project, field_name)
|
||||
outfile.write('"{}": {}'.format(field_name, json.dumps(value)))
|
||||
|
||||
# Generate the timeline
|
||||
outfile.write(',\n"timeline": [\n')
|
||||
first_timeline = True
|
||||
for timeline_item in get_project_timeline(project).iterator():
|
||||
# Avoid writing "," in the last element
|
||||
if not first_timeline:
|
||||
outfile.write(",\n")
|
||||
else:
|
||||
first_timeline = False
|
||||
|
||||
dumped_value = json.dumps(serializers.TimelineExportSerializer(timeline_item).data)
|
||||
outfile.write(dumped_value)
|
||||
|
||||
outfile.write(']}\n')
|
||||
|
||||
## PROJECT
|
||||
|
||||
def store_project(data):
|
||||
project_data = {}
|
||||
|
@ -175,43 +99,19 @@ def store_project(data):
|
|||
return None
|
||||
|
||||
|
||||
def _store_choice(project, data, field, serializer):
|
||||
serialized = serializer(data=data)
|
||||
if serialized.is_valid():
|
||||
serialized.object.project = project
|
||||
serialized.object._importing = True
|
||||
serialized.save()
|
||||
return serialized.object
|
||||
add_errors(field, serialized.errors)
|
||||
return None
|
||||
## MISC
|
||||
|
||||
def _use_id_instead_name_as_key_in_custom_attributes_values(custom_attributes, values):
|
||||
ret = {}
|
||||
for attr in custom_attributes:
|
||||
value = values.get(attr["name"], None)
|
||||
if value is not None:
|
||||
ret[str(attr["id"])] = value
|
||||
|
||||
return ret
|
||||
|
||||
|
||||
def store_choices(project, data, field, serializer):
|
||||
result = []
|
||||
for choice_data in data.get(field, []):
|
||||
result.append(_store_choice(project, choice_data, field, serializer))
|
||||
return result
|
||||
|
||||
|
||||
def _store_custom_attribute(project, data, field, serializer):
|
||||
serialized = serializer(data=data)
|
||||
if serialized.is_valid():
|
||||
serialized.object.project = project
|
||||
serialized.object._importing = True
|
||||
serialized.save()
|
||||
return serialized.object
|
||||
add_errors(field, serialized.errors)
|
||||
return None
|
||||
|
||||
|
||||
def store_custom_attributes(project, data, field, serializer):
|
||||
result = []
|
||||
for custom_attribute_data in data.get(field, []):
|
||||
result.append(_store_custom_attribute(project, custom_attribute_data, field, serializer))
|
||||
return result
|
||||
|
||||
|
||||
def store_custom_attributes_values(obj, data_values, obj_field, serializer_class):
|
||||
def _store_custom_attributes_values(obj, data_values, obj_field, serializer_class):
|
||||
data = {
|
||||
obj_field: obj.id,
|
||||
"attributes_values": data_values,
|
||||
|
@ -231,17 +131,39 @@ def store_custom_attributes_values(obj, data_values, obj_field, serializer_class
|
|||
return None
|
||||
|
||||
|
||||
def _use_id_instead_name_as_key_in_custom_attributes_values(custom_attributes, values):
|
||||
ret = {}
|
||||
for attr in custom_attributes:
|
||||
value = values.get(attr["name"], None)
|
||||
if value is not None:
|
||||
ret[str(attr["id"])] = value
|
||||
|
||||
return ret
|
||||
def _store_attachment(project, obj, attachment):
|
||||
serialized = serializers.AttachmentExportSerializer(data=attachment)
|
||||
if serialized.is_valid():
|
||||
serialized.object.content_type = ContentType.objects.get_for_model(obj.__class__)
|
||||
serialized.object.object_id = obj.id
|
||||
serialized.object.project = project
|
||||
if serialized.object.owner is None:
|
||||
serialized.object.owner = serialized.object.project.owner
|
||||
serialized.object._importing = True
|
||||
serialized.object.size = serialized.object.attached_file.size
|
||||
serialized.object.name = os.path.basename(serialized.object.attached_file.name)
|
||||
serialized.save()
|
||||
return serialized
|
||||
add_errors("attachments", serialized.errors)
|
||||
return serialized
|
||||
|
||||
|
||||
def store_role(project, role):
|
||||
def _store_history(project, obj, history):
|
||||
serialized = serializers.HistoryExportSerializer(data=history, context={"project": project})
|
||||
if serialized.is_valid():
|
||||
serialized.object.key = make_key_from_model_object(obj)
|
||||
if serialized.object.diff is None:
|
||||
serialized.object.diff = []
|
||||
serialized.object._importing = True
|
||||
serialized.save()
|
||||
return serialized
|
||||
add_errors("history", serialized.errors)
|
||||
return serialized
|
||||
|
||||
|
||||
## ROLES
|
||||
|
||||
def _store_role(project, role):
|
||||
serialized = serializers.RoleExportSerializer(data=role)
|
||||
if serialized.is_valid():
|
||||
serialized.object.project = project
|
||||
|
@ -255,14 +177,60 @@ def store_role(project, role):
|
|||
def store_roles(project, data):
|
||||
results = []
|
||||
for role in data.get("roles", []):
|
||||
serialized = store_role(project, role)
|
||||
serialized = _store_role(project, role)
|
||||
if serialized:
|
||||
results.append(serialized)
|
||||
|
||||
return results
|
||||
|
||||
|
||||
def store_default_choices(project, data):
|
||||
## MEMBERSHIPS
|
||||
|
||||
def _store_membership(project, membership):
|
||||
serialized = serializers.MembershipExportSerializer(data=membership, context={"project": project})
|
||||
if serialized.is_valid():
|
||||
serialized.object.project = project
|
||||
serialized.object._importing = True
|
||||
serialized.object.token = str(uuid.uuid1())
|
||||
serialized.object.user = find_invited_user(serialized.object.email,
|
||||
default=serialized.object.user)
|
||||
serialized.save()
|
||||
return serialized
|
||||
|
||||
add_errors("memberships", serialized.errors)
|
||||
return None
|
||||
|
||||
|
||||
def store_memberships(project, data):
|
||||
results = []
|
||||
for membership in data.get("memberships", []):
|
||||
results.append(_store_membership(project, membership))
|
||||
return results
|
||||
|
||||
|
||||
## PROJECT ATTRIBUTES
|
||||
|
||||
def _store_project_attribute_value(project, data, field, serializer):
|
||||
serialized = serializer(data=data)
|
||||
if serialized.is_valid():
|
||||
serialized.object.project = project
|
||||
serialized.object._importing = True
|
||||
serialized.save()
|
||||
return serialized.object
|
||||
add_errors(field, serialized.errors)
|
||||
return None
|
||||
|
||||
|
||||
def store_project_attributes_values(project, data, field, serializer):
|
||||
result = []
|
||||
for choice_data in data.get(field, []):
|
||||
result.append(_store_project_attribute_value(project, choice_data, field, serializer))
|
||||
return result
|
||||
|
||||
|
||||
## DEFAULT PROJECT ATTRIBUTES VALUES
|
||||
|
||||
def store_default_project_attributes_values(project, data):
|
||||
def helper(project, field, related, data):
|
||||
if field in data:
|
||||
value = related.all().get(name=data[field])
|
||||
|
@ -281,75 +249,27 @@ def store_default_choices(project, data):
|
|||
project.save()
|
||||
|
||||
|
||||
def store_membership(project, membership):
|
||||
serialized = serializers.MembershipExportSerializer(data=membership, context={"project": project})
|
||||
## CUSTOM ATTRIBUTES
|
||||
|
||||
def _store_custom_attribute(project, data, field, serializer):
|
||||
serialized = serializer(data=data)
|
||||
if serialized.is_valid():
|
||||
serialized.object.project = project
|
||||
serialized.object._importing = True
|
||||
serialized.object.token = str(uuid.uuid1())
|
||||
serialized.object.user = find_invited_user(serialized.object.email,
|
||||
default=serialized.object.user)
|
||||
serialized.save()
|
||||
return serialized
|
||||
|
||||
add_errors("memberships", serialized.errors)
|
||||
return serialized.object
|
||||
add_errors(field, serialized.errors)
|
||||
return None
|
||||
|
||||
|
||||
def store_memberships(project, data):
|
||||
results = []
|
||||
for membership in data.get("memberships", []):
|
||||
results.append(store_membership(project, membership))
|
||||
return results
|
||||
def store_custom_attributes(project, data, field, serializer):
|
||||
result = []
|
||||
for custom_attribute_data in data.get(field, []):
|
||||
result.append(_store_custom_attribute(project, custom_attribute_data, field, serializer))
|
||||
return result
|
||||
|
||||
|
||||
def store_task(project, data):
|
||||
if "status" not in data and project.default_task_status:
|
||||
data["status"] = project.default_task_status.name
|
||||
|
||||
serialized = serializers.TaskExportSerializer(data=data, context={"project": project})
|
||||
if serialized.is_valid():
|
||||
serialized.object.project = project
|
||||
if serialized.object.owner is None:
|
||||
serialized.object.owner = serialized.object.project.owner
|
||||
serialized.object._importing = True
|
||||
serialized.object._not_notify = True
|
||||
|
||||
serialized.save()
|
||||
serialized.save_watchers()
|
||||
|
||||
if serialized.object.ref:
|
||||
sequence_name = refs.make_sequence_name(project)
|
||||
if not seq.exists(sequence_name):
|
||||
seq.create(sequence_name)
|
||||
seq.set_max(sequence_name, serialized.object.ref)
|
||||
else:
|
||||
serialized.object.ref, _ = refs.make_reference(serialized.object, project)
|
||||
serialized.object.save()
|
||||
|
||||
for task_attachment in data.get("attachments", []):
|
||||
store_attachment(project, serialized.object, task_attachment)
|
||||
|
||||
history_entries = data.get("history", [])
|
||||
for history in history_entries:
|
||||
store_history(project, serialized.object, history)
|
||||
|
||||
if not history_entries:
|
||||
take_snapshot(serialized.object, user=serialized.object.owner)
|
||||
|
||||
custom_attributes_values = data.get("custom_attributes_values", None)
|
||||
if custom_attributes_values:
|
||||
custom_attributes = serialized.object.project.taskcustomattributes.all().values('id', 'name')
|
||||
custom_attributes_values = _use_id_instead_name_as_key_in_custom_attributes_values(
|
||||
custom_attributes, custom_attributes_values)
|
||||
store_custom_attributes_values(serialized.object, custom_attributes_values,
|
||||
"task", serializers.TaskCustomAttributesValuesExportSerializer)
|
||||
|
||||
return serialized
|
||||
|
||||
add_errors("tasks", serialized.errors)
|
||||
return None
|
||||
|
||||
## MILESTONE
|
||||
|
||||
def store_milestone(project, milestone):
|
||||
serialized = serializers.MilestoneExportSerializer(data=milestone, project=project)
|
||||
|
@ -368,90 +288,17 @@ def store_milestone(project, milestone):
|
|||
return None
|
||||
|
||||
|
||||
def store_attachment(project, obj, attachment):
|
||||
serialized = serializers.AttachmentExportSerializer(data=attachment)
|
||||
if serialized.is_valid():
|
||||
serialized.object.content_type = ContentType.objects.get_for_model(obj.__class__)
|
||||
serialized.object.object_id = obj.id
|
||||
serialized.object.project = project
|
||||
if serialized.object.owner is None:
|
||||
serialized.object.owner = serialized.object.project.owner
|
||||
serialized.object._importing = True
|
||||
serialized.object.size = serialized.object.attached_file.size
|
||||
serialized.object.name = path.basename(serialized.object.attached_file.name)
|
||||
serialized.save()
|
||||
return serialized
|
||||
add_errors("attachments", serialized.errors)
|
||||
return serialized
|
||||
def store_milestones(project, data):
|
||||
results = []
|
||||
for milestone_data in data.get("milestones", []):
|
||||
milestone = store_milestone(project, milestone_data)
|
||||
results.append(milestone)
|
||||
return results
|
||||
|
||||
|
||||
def store_timeline_entry(project, timeline):
|
||||
serialized = serializers.TimelineExportSerializer(data=timeline, context={"project": project})
|
||||
if serialized.is_valid():
|
||||
serialized.object.project = project
|
||||
serialized.object.namespace = build_project_namespace(project)
|
||||
serialized.object.object_id = project.id
|
||||
serialized.object._importing = True
|
||||
serialized.save()
|
||||
return serialized
|
||||
add_errors("timeline", serialized.errors)
|
||||
return serialized
|
||||
## USER STORIES
|
||||
|
||||
|
||||
def store_history(project, obj, history):
|
||||
serialized = serializers.HistoryExportSerializer(data=history, context={"project": project})
|
||||
if serialized.is_valid():
|
||||
serialized.object.key = make_key_from_model_object(obj)
|
||||
if serialized.object.diff is None:
|
||||
serialized.object.diff = []
|
||||
serialized.object._importing = True
|
||||
serialized.save()
|
||||
return serialized
|
||||
add_errors("history", serialized.errors)
|
||||
return serialized
|
||||
|
||||
|
||||
def store_wiki_page(project, wiki_page):
|
||||
wiki_page["slug"] = slugify(unidecode(wiki_page.get("slug", "")))
|
||||
serialized = serializers.WikiPageExportSerializer(data=wiki_page)
|
||||
if serialized.is_valid():
|
||||
serialized.object.project = project
|
||||
if serialized.object.owner is None:
|
||||
serialized.object.owner = serialized.object.project.owner
|
||||
serialized.object._importing = True
|
||||
serialized.object._not_notify = True
|
||||
serialized.save()
|
||||
serialized.save_watchers()
|
||||
|
||||
for attachment in wiki_page.get("attachments", []):
|
||||
store_attachment(project, serialized.object, attachment)
|
||||
|
||||
history_entries = wiki_page.get("history", [])
|
||||
for history in history_entries:
|
||||
store_history(project, serialized.object, history)
|
||||
|
||||
if not history_entries:
|
||||
take_snapshot(serialized.object, user=serialized.object.owner)
|
||||
|
||||
return serialized
|
||||
|
||||
add_errors("wiki_pages", serialized.errors)
|
||||
return None
|
||||
|
||||
|
||||
def store_wiki_link(project, wiki_link):
|
||||
serialized = serializers.WikiLinkExportSerializer(data=wiki_link)
|
||||
if serialized.is_valid():
|
||||
serialized.object.project = project
|
||||
serialized.object._importing = True
|
||||
serialized.save()
|
||||
return serialized
|
||||
|
||||
add_errors("wiki_links", serialized.errors)
|
||||
return None
|
||||
|
||||
|
||||
def store_role_point(project, us, role_point):
|
||||
def _store_role_point(project, us, role_point):
|
||||
serialized = serializers.RolePointsExportSerializer(data=role_point, context={"project": project})
|
||||
if serialized.is_valid():
|
||||
try:
|
||||
|
@ -468,7 +315,6 @@ def store_role_point(project, us, role_point):
|
|||
add_errors("role_points", serialized.errors)
|
||||
return None
|
||||
|
||||
|
||||
def store_user_story(project, data):
|
||||
if "status" not in data and project.default_us_status:
|
||||
data["status"] = project.default_us_status.name
|
||||
|
@ -497,14 +343,14 @@ def store_user_story(project, data):
|
|||
serialized.object.save()
|
||||
|
||||
for us_attachment in data.get("attachments", []):
|
||||
store_attachment(project, serialized.object, us_attachment)
|
||||
_store_attachment(project, serialized.object, us_attachment)
|
||||
|
||||
for role_point in data.get("role_points", []):
|
||||
store_role_point(project, serialized.object, role_point)
|
||||
_store_role_point(project, serialized.object, role_point)
|
||||
|
||||
history_entries = data.get("history", [])
|
||||
for history in history_entries:
|
||||
store_history(project, serialized.object, history)
|
||||
_store_history(project, serialized.object, history)
|
||||
|
||||
if not history_entries:
|
||||
take_snapshot(serialized.object, user=serialized.object.owner)
|
||||
|
@ -514,7 +360,7 @@ def store_user_story(project, data):
|
|||
custom_attributes = serialized.object.project.userstorycustomattributes.all().values('id', 'name')
|
||||
custom_attributes_values = _use_id_instead_name_as_key_in_custom_attributes_values(
|
||||
custom_attributes, custom_attributes_values)
|
||||
store_custom_attributes_values(serialized.object, custom_attributes_values,
|
||||
_store_custom_attributes_values(serialized.object, custom_attributes_values,
|
||||
"user_story", serializers.UserStoryCustomAttributesValuesExportSerializer)
|
||||
|
||||
return serialized
|
||||
|
@ -523,6 +369,74 @@ def store_user_story(project, data):
|
|||
return None
|
||||
|
||||
|
||||
def store_user_stories(project, data):
|
||||
results = []
|
||||
for userstory in data.get("user_stories", []):
|
||||
us = store_user_story(project, userstory)
|
||||
results.append(us)
|
||||
return results
|
||||
|
||||
|
||||
## TASKS
|
||||
|
||||
def store_task(project, data):
|
||||
if "status" not in data and project.default_task_status:
|
||||
data["status"] = project.default_task_status.name
|
||||
|
||||
serialized = serializers.TaskExportSerializer(data=data, context={"project": project})
|
||||
if serialized.is_valid():
|
||||
serialized.object.project = project
|
||||
if serialized.object.owner is None:
|
||||
serialized.object.owner = serialized.object.project.owner
|
||||
serialized.object._importing = True
|
||||
serialized.object._not_notify = True
|
||||
|
||||
serialized.save()
|
||||
serialized.save_watchers()
|
||||
|
||||
if serialized.object.ref:
|
||||
sequence_name = refs.make_sequence_name(project)
|
||||
if not seq.exists(sequence_name):
|
||||
seq.create(sequence_name)
|
||||
seq.set_max(sequence_name, serialized.object.ref)
|
||||
else:
|
||||
serialized.object.ref, _ = refs.make_reference(serialized.object, project)
|
||||
serialized.object.save()
|
||||
|
||||
for task_attachment in data.get("attachments", []):
|
||||
_store_attachment(project, serialized.object, task_attachment)
|
||||
|
||||
history_entries = data.get("history", [])
|
||||
for history in history_entries:
|
||||
_store_history(project, serialized.object, history)
|
||||
|
||||
if not history_entries:
|
||||
take_snapshot(serialized.object, user=serialized.object.owner)
|
||||
|
||||
custom_attributes_values = data.get("custom_attributes_values", None)
|
||||
if custom_attributes_values:
|
||||
custom_attributes = serialized.object.project.taskcustomattributes.all().values('id', 'name')
|
||||
custom_attributes_values = _use_id_instead_name_as_key_in_custom_attributes_values(
|
||||
custom_attributes, custom_attributes_values)
|
||||
_store_custom_attributes_values(serialized.object, custom_attributes_values,
|
||||
"task", serializers.TaskCustomAttributesValuesExportSerializer)
|
||||
|
||||
return serialized
|
||||
|
||||
add_errors("tasks", serialized.errors)
|
||||
return None
|
||||
|
||||
|
||||
def store_tasks(project, data):
    results = []
    for task in data.get("tasks", []):
        task = store_task(project, task)
        results.append(task)
    return results


## ISSUES

def store_issue(project, data):
    serialized = serializers.IssueExportSerializer(data=data, context={"project": project})
@ -558,11 +472,11 @@ def store_issue(project, data):
            serialized.object.save()

        for attachment in data.get("attachments", []):
            store_attachment(project, serialized.object, attachment)
            _store_attachment(project, serialized.object, attachment)

        history_entries = data.get("history", [])
        for history in history_entries:
            store_history(project, serialized.object, history)
            _store_history(project, serialized.object, history)

        if not history_entries:
            take_snapshot(serialized.object, user=serialized.object.owner)
@ -572,10 +486,248 @@ def store_issue(project, data):
            custom_attributes = serialized.object.project.issuecustomattributes.all().values('id', 'name')
            custom_attributes_values = _use_id_instead_name_as_key_in_custom_attributes_values(
                                                                custom_attributes, custom_attributes_values)
            store_custom_attributes_values(serialized.object, custom_attributes_values,
            _store_custom_attributes_values(serialized.object, custom_attributes_values,
                                            "issue", serializers.IssueCustomAttributesValuesExportSerializer)

        return serialized

    add_errors("issues", serialized.errors)
    return None

def store_issues(project, data):
    issues = []
    for issue in data.get("issues", []):
        issues.append(store_issue(project, issue))
    return issues


## WIKI PAGES

def store_wiki_page(project, wiki_page):
    wiki_page["slug"] = slugify(unidecode(wiki_page.get("slug", "")))
    serialized = serializers.WikiPageExportSerializer(data=wiki_page)
    if serialized.is_valid():
        serialized.object.project = project
        if serialized.object.owner is None:
            serialized.object.owner = serialized.object.project.owner
        serialized.object._importing = True
        serialized.object._not_notify = True
        serialized.save()
        serialized.save_watchers()

        for attachment in wiki_page.get("attachments", []):
            _store_attachment(project, serialized.object, attachment)

        history_entries = wiki_page.get("history", [])
        for history in history_entries:
            _store_history(project, serialized.object, history)

        if not history_entries:
            take_snapshot(serialized.object, user=serialized.object.owner)

        return serialized

    add_errors("wiki_pages", serialized.errors)
    return None

def store_wiki_pages(project, data):
    results = []
    for wiki_page in data.get("wiki_pages", []):
        results.append(store_wiki_page(project, wiki_page))
    return results


## WIKI LINKS

def store_wiki_link(project, wiki_link):
    serialized = serializers.WikiLinkExportSerializer(data=wiki_link)
    if serialized.is_valid():
        serialized.object.project = project
        serialized.object._importing = True
        serialized.save()
        return serialized

    add_errors("wiki_links", serialized.errors)
    return None

def store_wiki_links(project, data):
    results = []
    for wiki_link in data.get("wiki_links", []):
        results.append(store_wiki_link(project, wiki_link))
    return results


## TAGS COLORS

def store_tags_colors(project, data):
    project.tags_colors = data.get("tags_colors", [])
    project.save()
    return None


## TIMELINE

def _store_timeline_entry(project, timeline):
    serialized = serializers.TimelineExportSerializer(data=timeline, context={"project": project})
    if serialized.is_valid():
        serialized.object.project = project
        serialized.object.namespace = build_project_namespace(project)
        serialized.object.object_id = project.id
        serialized.object._importing = True
        serialized.save()
        return serialized
    add_errors("timeline", serialized.errors)
    return serialized

def store_timeline_entries(project, data):
    results = []
    for timeline in data.get("timeline", []):
        tl = _store_timeline_entry(project, timeline)
        results.append(tl)
    return results


#############################################
## Store project dict
#############################################

def _validate_if_owner_have_enought_space_to_this_project(owner, data):
    # Validate if the owner can have this project
    data["owner"] = owner.email

    is_private = data.get("is_private", False)
    total_memberships = len([m for m in data.get("memberships", [])
                             if m.get("email", None) != data["owner"]])
    total_memberships = total_memberships + 1  # 1 is the owner
    (enough_slots, error_message) = users_service.has_available_slot_for_import_new_project(
        owner,
        is_private,
        total_memberships
    )
    if not enough_slots:
        raise err.TaigaImportError(error_message, None)

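A quick worked example of the membership count above: a dump whose "memberships" list holds three entries, one of them belonging to the owner's email, gives total_memberships = 2 + 1 = 3, since the owner's own entry is filtered out and the owner is then counted exactly once.
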
def _create_project_object(data):
    # Create the project
    project_serialized = store_project(data)

    if not project_serialized:
        raise err.TaigaImportError(_("error importing project data"), None)

    return project_serialized.object if project_serialized else None

def _create_membership_for_project_owner(project):
    if project.memberships.filter(user=project.owner).count() == 0:
        if project.roles.all().count() > 0:
            Membership.objects.create(
                project=project,
                email=project.owner.email,
                user=project.owner,
                role=project.roles.all().first(),
                is_admin=True
            )

def _populate_project_object(project, data):
    def check_if_there_is_some_error(message=_("error importing project data"), project=None):
        errors = get_errors(clear=False)
        if errors:
            raise err.TaigaImportError(message, project, errors=errors)

    # Create roles
    store_roles(project, data)
    check_if_there_is_some_error(_("error importing roles"), None)

    # Create memberships
    store_memberships(project, data)
    _create_membership_for_project_owner(project)
    check_if_there_is_some_error(_("error importing memberships"), project)

    # Create project attributes values
    store_project_attributes_values(project, data, "us_statuses", serializers.UserStoryStatusExportSerializer)
    store_project_attributes_values(project, data, "points", serializers.PointsExportSerializer)
    store_project_attributes_values(project, data, "task_statuses", serializers.TaskStatusExportSerializer)
    store_project_attributes_values(project, data, "issue_types", serializers.IssueTypeExportSerializer)
    store_project_attributes_values(project, data, "issue_statuses", serializers.IssueStatusExportSerializer)
    store_project_attributes_values(project, data, "priorities", serializers.PriorityExportSerializer)
    store_project_attributes_values(project, data, "severities", serializers.SeverityExportSerializer)
    check_if_there_is_some_error(_("error importing lists of project attributes"), project)

    # Create default values for project attributes
    store_default_project_attributes_values(project, data)
    check_if_there_is_some_error(_("error importing default project attributes values"), project)

    # Create custom attributes
    store_custom_attributes(project, data, "userstorycustomattributes",
                            serializers.UserStoryCustomAttributeExportSerializer)
    store_custom_attributes(project, data, "taskcustomattributes",
                            serializers.TaskCustomAttributeExportSerializer)
    store_custom_attributes(project, data, "issuecustomattributes",
                            serializers.IssueCustomAttributeExportSerializer)
    check_if_there_is_some_error(_("error importing custom attributes"), project)

    # Create milestones
    store_milestones(project, data)
    check_if_there_is_some_error(_("error importing sprints"), project)

    # Create user stories
    store_user_stories(project, data)
    check_if_there_is_some_error(_("error importing user stories"), project)

    # Create tasks
    store_tasks(project, data)
    check_if_there_is_some_error(_("error importing tasks"), project)

    # Create issues
    store_issues(project, data)
    check_if_there_is_some_error(_("error importing issues"), project)

    # Create wiki pages
    store_wiki_pages(project, data)
    check_if_there_is_some_error(_("error importing wiki pages"), project)

    # Create wiki links
    store_wiki_links(project, data)
    check_if_there_is_some_error(_("error importing wiki links"), project)

    # Create tags
    store_tags_colors(project, data)
    check_if_there_is_some_error(_("error importing tags"), project)

    # Create timeline
    store_timeline_entries(project, data)
    check_if_there_is_some_error(_("error importing timelines"), project)

    # Regenerate stats
    project.refresh_totals()

def store_project_from_dict(data, owner=None):
    reset_errors()

    # Validate
    if owner:
        _validate_if_owner_have_enought_space_to_this_project(owner, data)

    # Create project
    project = _create_project_object(data)

    # Populate project
    try:
        _populate_project_object(project, data)
    except err.TaigaImportError:
        # re-raise known import errors
        raise
    except:
        # raise unknown errors as import errors
        raise err.TaigaImportError(_("unexpected error importing project"), project)

    return project
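A minimal caller sketch for the function above, assuming `dump_data` is an already-parsed dump dict and `owner` is a User instance; it shows how a failed import surfaces as TaigaImportError carrying the partially created project so the caller can remove it (the Celery task further down does exactly this):

    from taiga.export_import import services
    from taiga.export_import import exceptions as err

    try:
        project = services.store_project_from_dict(dump_data, owner)
    except err.TaigaImportError as e:
        # e.project is the partially imported project (or None if project
        # creation itself failed); remove it and its related content
        # before re-raising.
        if e.project:
            e.project.delete_related_content()
            e.project.delete()
        raise
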
@ -27,10 +27,11 @@ from django.conf import settings
from django.utils.translation import ugettext as _

from taiga.base.mails import mail_builder
from taiga.base.utils import json
from taiga.celery import app

from .service import render_project
from .dump_service import dict_to_project
from . import exceptions as err
from . import services
from .renderers import ExportRenderer

logger = logging.getLogger('taiga.export_import')
@ -46,7 +47,7 @@ def dump_project(self, user, project):
    try:
        url = default_storage.url(path)
        with default_storage.open(storage_path, mode="w") as outfile:
            render_project(project, outfile)
            services.render_project(project, outfile)

    except Exception:
        # Error
@ -77,28 +78,57 @@ def delete_project_dump(project_id, project_slug, task_id):
    default_storage.delete("exports/{}/{}-{}.json".format(project_id, project_slug, task_id))


ADMIN_ERROR_LOAD_PROJECT_DUMP_MESSAGE = _("""
Error loading dump by {user_full_name} <{user_email}>:


REASON:
-------
{reason}

DETAILS:
--------
{details}

TRACE ERROR:
------------""")

@app.task
def load_project_dump(user, dump):
    try:
        project = dict_to_project(dump, user)
    except Exception:
        # Error
        project = services.store_project_from_dict(dump, user)
    except err.TaigaImportError as e:
        # On Error
        ## remove project
        if e.project:
            e.project.delete_related_content()
            e.project.delete()

        ## send email to the user
        error_subject = _("Error loading project dump")
        error_message = e.message or _("Error loading your project dump file")

        ctx = {
            "user": user,
            "error_subject": _("Error loading project dump"),
            "error_message": _("Error loading project dump"),
            "error_subject": error_subject,
            "error_message": error_message,
        }
        email = mail_builder.import_error(user, ctx)
        email.send()
        logger.error('Error loading dump by %s <%s>',
                     user,
                     user.email,
                     exc_info=sys.exc_info())

        # TODO: [Rollback] Remove project because it can be corrupted
        ## log the error for sysadmins
        text = ADMIN_ERROR_LOAD_PROJECT_DUMP_MESSAGE.format(
            user_full_name=user,
            user_email=user.email,
            reason=e.message or _(" -- no detail info --"),
            details=json.dumps(e.errors, indent=4)
        )
        logger.error(text, exc_info=sys.exc_info())

    else:
        # Success
        # On Success
        ctx = {"user": user, "project": project}
        email = mail_builder.load_dump(user, ctx)
        email.send()
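A hedged usage sketch for the task above, assuming a Celery worker is available, `dump` is the already-parsed dump dict, and `user` is the importing User:

    from taiga.export_import.tasks import load_project_dump

    # Enqueue the import asynchronously; on failure the task removes the
    # partial project and mails the user, on success it sends the load_dump
    # email instead.
    load_project_dump.delay(user, dump)
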
File diff suppressed because it is too large
@ -20,7 +20,7 @@ import io
from .. import factories as f

from taiga.base.utils import json
from taiga.export_import.service import render_project
from taiga.export_import.services import render_project

pytestmark = pytest.mark.django_db