I would like to test the import function from django-import-export. When I run my test, the code reports that no dataset was provided (None), even though I post a proper CSV file, so my assertions fail, starting from self.assertIn('result', response.context).

Why don't my resource class and its import_data method see the dataset I provide in response = self.client.post(import_url, data=data)?
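For reference, this is the kind of direct call I would expect import_data to receive; a minimal sketch using tablib (a dependency of django-import-export), with headers mirroring Meta.fields:

import tablib

# Build by hand the dataset the admin import view should pass to the resource.
dataset = tablib.Dataset(headers=['login', 'email', 'first_name', 'last_name', 'gender'])
dataset.append(['test', '[email protected]', 'test', 'test', 'test'])

resource = UserImportResource()
# dry_run=True mirrors the admin's confirmation step (nothing is committed).
result = resource.import_data(dataset, dry_run=True)
print(result.has_errors())

If this direct call works but the POST does not, the problem would be in how the admin view builds the dataset from the upload.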
My test:
class ImportUserCase(TestCase):
    def setUp(self) -> None:
        self.admin = User.objects.create(login='admin', email='[email protected]', password='password', is_staff=True, is_superuser=True)
        self.client = Client()
        self.client.force_login(self.admin)
        self.resource = UserImportResource()

    def test_import_action(self) -> None:
        import_url = '/admin/core/user/import/'
        process_url = '/admin/core/user/process_import/'
        input_format = '0'
        filename = 'users.csv'
        with open(filename, 'w') as f:
            writer = csv.writer(f)
            writer.writerow(UserImportResource.Meta.fields)
            writer.writerow(['test', '[email protected]', 'test', 'test', 'test'])
        with open(filename, 'rb') as f:
            data = {
                'input_format': input_format,
                'import_file': f,
            }
            response = self.client.post(import_url, data=data)
        self.assertEqual(response.status_code, 200)
        self.assertIn('result', response.context)
        self.assertFalse(response.context['result'].has_errors())
        self.assertIn('confirm_form', response.context)
        confirm_form = response.context['confirm_form']
        data = confirm_form.initial
        self.assertEqual(data['original_file_name'], 'users.csv')
        response = self.client.post(process_url, data, follow=True)
        self.assertEqual(response.status_code, 200)
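For completeness, the same POST can be built without touching the filesystem, using Django's SimpleUploadedFile; a sketch equivalent to the file-based version above (reusing import_url and self.client from the test):

import csv
import io

from django.core.files.uploadedfile import SimpleUploadedFile

# Build the CSV in memory instead of writing users.csv to disk.
buf = io.StringIO()
writer = csv.writer(buf)
writer.writerow(UserImportResource.Meta.fields)
writer.writerow(['test', '[email protected]', 'test', 'test', 'test'])

upload = SimpleUploadedFile('users.csv', buf.getvalue().encode('utf-8'), content_type='text/csv')
data = {'input_format': '0', 'import_file': upload}
response = self.client.post(import_url, data=data)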
My resource:
class UserImportResource(resources.ModelResource):
    class Meta:
        model = User
        instance_loader_class = UserLoader
        fields = ('login', 'email', 'first_name', 'last_name', 'gender')
        use_transactions = False

    def get_import_id_fields(self):
        return ['email']

    def get_import_fields(self):
        return [self.fields[f] for f in self.get_import_field_names()]

    def get_import_field_names(self):
        return ('login', 'email', 'first_name', 'last_name', 'gender')

    def before_import(self, dataset, using_transactions, dry_run, **kwargs):
        # Drop rows whose email is empty or whitespace-only.
        to_delete = []
        for idx, row in enumerate(dataset.dict):
            if not row['email'] or not row['email'].strip():
                to_delete.append(idx)
        for idx in sorted(to_delete, reverse=True):
            del dataset[idx]

    def before_import_row(self, row, **kwargs):
        if not row['email']:
            return
        row['email'] = row['email'].strip()
    def import_data(self, dataset, dry_run=False, raise_errors=False,
                    use_transactions=None, collect_failed_rows=False, **kwargs):
        print(f'dataset: {dataset}')  # prints None instead of the real dataset
        result = self.get_result_class()()
        fns = self.get_import_field_names()
        # Reject datasets that carry an unknown header or an 'id' column.
        for header in dataset.headers:
            if header not in fns or header == 'id':
                err_msg = 'Error'
                result.append_base_error(self.get_error_result_class()(err_msg))
                return result
        user_logins_search = {str(row['login']).strip() for row in dataset.dict}
        available_login_map = {}
        for user_data in User.objects.filter(login__in=user_logins_search).values('login', 'email'):
            available_login_map[user_data['login']] = user_data['email']
        using_transactions = not dry_run
        with atomic_if_using_transaction(using_transactions):
            return self.import_data_inner(dataset, dry_run, raise_errors, False, collect_failed_rows, **kwargs)

    def get_user_visible_fields(self):
        return self.get_import_fields()
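Since before_import is supposed to drop rows with a blank email, here is a small sketch of checking that behaviour in isolation, again with a hand-built tablib.Dataset:

import tablib

dataset = tablib.Dataset(headers=['login', 'email', 'first_name', 'last_name', 'gender'])
dataset.append(['a', '[email protected]', 'a', 'a', 'x'])
dataset.append(['b', '   ', 'b', 'b', 'x'])  # blank email, should be removed

resource = UserImportResource()
resource.before_import(dataset, using_transactions=False, dry_run=True)
print(len(dataset))  # expected: 1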