Compare commits
75 Commits
0ffc474279...developmen
| SHA1 |
|---|
| 858af1231b |
| 2c87642f56 |
| 1e773ef53d |
| 67fa1e23e7 |
| c02c165ff9 |
| f13851a30d |
| aa8e39c25b |
| 66554159ea |
| 13caa78087 |
| 6c291e2294 |
| 7618518dc7 |
| 8bea18e676 |
| 3b6deb0356 |
| 8449e2ef1b |
| 627168b82a |
| 8296214401 |
| 84ea3b35d2 |
| dbd0715c54 |
| 241a3551b7 |
| e00bd72a58 |
| 77f35fe350 |
| bec3405454 |
| ce9e45667f |
| 3209dd2d31 |
| 0a84ca6fe4 |
| cc81bc8a3d |
| 3258991014 |
| fb58e6c3aa |
| c5b87e8591 |
| 0dd145131f |
| 74a870380e |
| f798b72dbc |
| 93180edc0b |
| 0b08107c14 |
| 56025d77b1 |
| e68485c4cc |
| d643237a77 |
| 8c9f7aca02 |
| 9ed2a099e7 |
| 879e004e9b |
| 10c6eb9e79 |
| 36df84da98 |
| 915b0bf5a1 |
| da15cb5b99 |
| e94d5e4d1b |
| eaba79ee91 |
| e218c550e4 |
| c173a1cd85 |
| 08468fe67c |
| dd807f04be |
| c0e62541c3 |
| b93d9148f8 |
| 6d55f1cd3b |
| c3511590b7 |
| b4997da6b7 |
| 71726591e3 |
| afdb201b78 |
| 10a3572a8f |
| bf4b3d4422 |
| 6311a23609 |
| 757ad85688 |
| f2aab5c6b6 |
| e44121b10b |
| 0062d0c375 |
| 40e0daeb60 |
| 4cabee3502 |
| d0cbe435e3 |
| 853dc70396 |
| 6e4fc46271 |
| eef768e360 |
| 975132d21e |
| 81c272766e |
| 54047e625b |
| 057943b37f |
| 5a4fe87561 |
Dockerfile (18)
@@ -1,5 +1,5 @@
# pull official base image
FROM ghcr.io/seniorkian/python310-rasaddam:1.0.1
FROM registry.hamdocker.ir/seniorkian/python310-rasaddam:1.0.0

# Create the app directory
RUN #mkdir /app
@@ -12,10 +12,22 @@ ENV PYTHONDONTWRITEBYTECODE 1
ENV PYTHONUNBUFFERED 1

# install dependencies
ENV TZ="Asia/Tehran"
RUN pip config --user set global.index https://mirror-pypi.runflare.com/simple
RUN pip config --user set global.index-url https://mirror-pypi.runflare.com/simple
RUN pip config --user set global.trusted-host mirror-pypi.runflare.com
RUN pip install --upgrade pip
#RUN apt-get update && apt-get install -y \
# libcairo2 \
# libpango-1.0-0 \
# libpangocairo-1.0-0 \
# libgdk-pixbuf2.0-0 \
# libffi-dev \
# shared-mime-info \
# fonts-dejavu \
# && rm -rf /var/lib/apt/lists/*
COPY ./requirements.txt .
RUN --mount=type=cache,target=/root/.cache/pip \
pip install --no-cache-dir -r requirements.txt
RUN pip install --no-cache-dir -r requirements.txt

# copy project
COPY . /app/
@@ -30,7 +30,7 @@ from apps.authentication.models import (
Organization,
OrganizationType,
BankAccountInformation,
BlacklistedAccessToken
BlacklistedAccessToken, OrganizationLocationInfo
)
from apps.authentication.tools import get_token_jti
from apps.authorization.api.v1 import api as authorize_view
@@ -261,14 +261,21 @@ class OrganizationViewSet(BaseViewSet, ModelViewSet, DynamicSearchMixin):
def list(self, request, *args, **kwargs):
""" all organization """
org = get_organization_by_user(request.user)
param = self.request.query_params # noqa

queryset = self.get_queryset(
visibility_by_org_scope=True
) if org.free_visibility_by_scope else self.get_queryset()

query = self.filter_query(queryset)
# filter by organization type
if 'org_type' in param.keys():
queryset = queryset.filter(type__id=int(param.get('org_type', 0)))

page = self.paginate_queryset(query.order_by('-create_date')) # paginate queryset
# filter on search
if 'search' in param.keys():
queryset = self.filter_query(queryset)

page = self.paginate_queryset(queryset.order_by('-create_date')) # paginate queryset

if page is not None: # noqa
serializer = self.serializer_class(page, many=True)
@@ -284,6 +291,18 @@ class OrganizationViewSet(BaseViewSet, ModelViewSet, DynamicSearchMixin):
if serializer.is_valid():
organization = serializer.save()

if 'addresses' in request.data.keys():
# import multiple addresses with postal_code to orgs

address_obj_list = []
for addr in request.data['addresses']:
addr.update({'org': organization})
address_obj_list.append(
OrganizationLocationInfo(**addr)
)

OrganizationLocationInfo.objects.bulk_create(address_obj_list)

if 'user_relations' in request.data.keys():
user_relations = CustomOperations().custom_create( # create user relations
request=request,
@@ -315,6 +334,22 @@ class OrganizationViewSet(BaseViewSet, ModelViewSet, DynamicSearchMixin):
serializer.is_valid(raise_exception=True)
organization = serializer.save()

if 'addresses' in request.data.keys():
# import multiple addresses with postal_code to orgs

locations = organization.locations.all()
locations.delete() # remove ex locations

# create new locations
address_obj_list = []
for addr in request.data['addresses']:
addr.update({'org': organization})
address_obj_list.append(
OrganizationLocationInfo(**addr)
)

OrganizationLocationInfo.objects.bulk_create(address_obj_list)

if 'user_relations' in request.data.keys():
user_relations = CustomOperations().custom_update( # update user relations
request=request,
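Note: the create and update handlers above read an optional `addresses` list from the request body, build one OrganizationLocationInfo per entry, and insert them with bulk_create (the update path deletes the organization's existing locations first). A minimal sketch of the payload shape this implies, assuming the other organization fields stay as before; every concrete value and the `name` key are placeholders:

```python
# Hypothetical request body for the organization create/update endpoints above.
# Only 'addresses' and 'user_relations' come from the loops shown in the diff;
# all values are illustrative.
payload = {
    "name": "Example Org",  # assumed regular OrganizationSerializer field
    "addresses": [
        {"postal_code": "1234567890", "address": "Tehran, example street 1"},
        {"postal_code": "9876543210", "address": "Mashhad, example street 2"},
    ],
    "user_relations": [],  # handed to CustomOperations().custom_create / custom_update
}
```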
@@ -258,6 +258,7 @@ class OrganizationSerializer(serializers.ModelSerializer):
'address',
'parent_organization',
'national_unique_id',
'unique_unit_identity',
'company_code',
'field_of_activity',
'free_visibility_by_scope',
@@ -363,6 +364,13 @@ class OrganizationSerializer(serializers.ModelSerializer):
'name': city.name,
} for city in instance.service_area.all()
]

representation['addresses'] = [
{
"postal_code": addr.postal_code,
"address": addr.address
} for addr in instance.locations.all()
]
return representation

def update(self, instance, validated_data):
@@ -374,6 +382,7 @@ class OrganizationSerializer(serializers.ModelSerializer):
instance.address = validated_data.get('address', instance.address)
instance.parent_organization = validated_data.get('parent_organization', instance.parent_organization)
instance.national_unique_id = validated_data.get('national_unique_id', instance.national_unique_id)
instance.unique_unit_identity = validated_data.get('unique_unit_identity', instance.unique_unit_identity)
instance.purchase_policy = validated_data.get('purchase_policy', instance.purchase_policy)
instance.free_visibility_by_scope = validated_data.get(
'free_visibility_by_scope',

@@ -0,0 +1,16 @@
# Generated by Django 5.0 on 2026-02-09 06:41
from django.db import migrations, models


class Migration(migrations.Migration):
dependencies = [
('authentication', '0060_organization_ownership_code'),
]

operations = [
migrations.AddField(
model_name='organization',
name='unique_unit_identity',
field=models.CharField(default='0', max_length=150),
),
]
@@ -0,0 +1,34 @@
# Generated by Django 5.0 on 2026-02-09 06:57

import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models


class Migration(migrations.Migration):

dependencies = [
('authentication', '0061_organization_unique_unit_identity_and_more'),
]

operations = [
migrations.CreateModel(
name='OrganizationLocationInfo',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('create_date', models.DateTimeField(auto_now_add=True)),
('modify_date', models.DateTimeField(auto_now=True)),
('creator_info', models.CharField(max_length=100, null=True)),
('modifier_info', models.CharField(max_length=100, null=True)),
('trash', models.BooleanField(default=False)),
('postal_code', models.CharField(blank=True, max_length=150, null=True)),
('address', models.TextField(blank=True, max_length=2000, null=True)),
('created_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='%(class)s_createddby', to=settings.AUTH_USER_MODEL)),
('modified_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='%(class)s_modifiedby', to=settings.AUTH_USER_MODEL)),
('org', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='locations', to='authentication.organization')),
],
options={
'abstract': False,
},
),
]
@@ -116,6 +116,7 @@ class Organization(BaseModel):
null=True
)
national_unique_id = models.CharField(max_length=30, default="0")
unique_unit_identity = models.CharField(max_length=150, default="0")
activity_fields = (
('CO', 'Country'),
('PR', 'Province'),
@@ -167,6 +168,23 @@ class Organization(BaseModel):
super(Organization, self).save(*args, **kwargs)


class OrganizationLocationInfo(BaseModel):
org = models.ForeignKey(
Organization,
on_delete=models.CASCADE,
related_name='locations',
null=True
)
postal_code = models.CharField(max_length=150, null=True, blank=True)
address = models.TextField(max_length=2000, null=True, blank=True)

def __str__(self):
return f'{self.org.name}-{self.postal_code}-{self.address}'

def save(self, *args, **kwargs):
super(OrganizationLocationInfo, self).save(*args, **kwargs)


class OrganizationStats(BaseModel):
organization = models.OneToOneField(
Organization,
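Note: OrganizationLocationInfo hangs off Organization through related_name='locations', which is the reverse relation the update view and the serializer's to_representation rely on. A minimal read-side sketch, assuming at least one Organization row exists:

```python
# Minimal sketch of reading an organization's addresses back through the
# reverse FK (related_name='locations'); mirrors the serializer output above.
from apps.authentication.models import Organization

org = Organization.objects.first()  # placeholder lookup
if org is not None:
    for loc in org.locations.all():
        print(loc.postal_code, loc.address)
```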
@@ -11,6 +11,7 @@ VISIBILITY_MAP = {
'rancher': 'organization',
'rancherorganizationlink': 'organization', # noqa
'tagbatch': 'organization', # noqa
'tagdistribution': ['assigner_org', 'assigned_org']

# 'deviceactivationcode': 'organization',
# 'deviceversion': 'organization',

@@ -42,7 +42,6 @@ class HerdRancherSyncService:
seen_in_batch = set()

for temp in queryset.iterator(chunk_size=batch_size):

rancher = rancher_map.get(temp.rancher_national_code)

if not rancher:
@@ -52,7 +51,7 @@ class HerdRancherSyncService:
national_code=temp.rancher_national_code,
rancher_type='N',
city_id=city_map.get(temp.city.strip()),
province_id=30
province_id=28
)
new_ranchers.append(rancher)
rancher_map[temp.rancher_national_code] = rancher
@@ -78,7 +77,7 @@ class HerdRancherSyncService:
postal=temp.postal_code,
unit_unique_id=temp.unit_unique_id,
city_id=city_map.get(temp.city.strip()),
province_id=30
province_id=28
)
}
)

@@ -1,8 +1,9 @@
import pandas as pd
from django.core.management.base import BaseCommand
from django.utils.dateparse import parse_datetime
from apps.livestock.models import LiveStock, LiveStockType, LiveStockSpecies

from apps.herd.models import Herd
from apps.livestock.models import LiveStock, LiveStockType, LiveStockSpecies
from apps.tag.models import Tag


@@ -16,6 +17,7 @@ class Command(BaseCommand):
path = options['excel_path']
df = pd.read_excel(path)
records = df.to_dict(orient='records')
print(records[1])

self.stdout.write(self.style.SUCCESS(f"{len(records)} records loaded."))

@@ -46,7 +48,7 @@ class Command(BaseCommand):
herd_cache[herd_code] = herd

tag_code = r.get('national_id_livestock_code')
tag = Tag.objects.filter(code=tag_code).first()
tag = Tag.objects.filter(tag_code=tag_code).first()
if not tag:
skipped += 1
continue

@@ -0,0 +1,144 @@
from datetime import datetime

import jdatetime
from django.core.management.base import BaseCommand
from django.db import transaction
from django.utils import timezone

from apps.herd.models import Herd
from apps.livestock.models import (
LiveStock,
LiveStockSpecies,
ExcelLiveStocks
)

BATCH_SIZE = 100


class Command(BaseCommand):
help = "Import livestock from ExcelLiveStocks into LiveStock using bulk_create"

def normalize_herd_code(self, value, length=10):
if value is None:
return None
return str(value).strip().zfill(length)

def parse_jalali_datetime(self, date_str: str):
if not date_str:
return None

year, month, day = map(int, date_str.split('/'))

# jalali → gregorian (date)
g_date = jdatetime.date(year, month, day).togregorian()

# date → naive datetime
naive_dt = datetime.combine(g_date, datetime.min.time())

# naive → aware (VERY IMPORTANT)
return timezone.make_aware(naive_dt)

def handle(self, *args, **options):
qs = ExcelLiveStocks.objects.all()

if not qs.exists():
self.stdout.write(self.style.WARNING("No records to import"))
return

# ---------- preload lookups ----------
herd_map = {
h.code: h
for h in Herd.objects.all()
}

species_map = {
s.name.strip(): s
for s in LiveStockSpecies.objects.all()
}

livestocks_to_create = []
processed_ids = []

created_count = 0
skipped = 0

self.stdout.write("Starting import...")

with transaction.atomic():
for row in qs.iterator(chunk_size=BATCH_SIZE):
herd = herd_map.get(self.normalize_herd_code(row.herd_code))
# print(self.normalize_herd_code(row.herd_code))
if not herd:
# print("herd")
skipped += 1
continue

# species cache / create
species_name = (row.species or "").strip()
if not species_name:
# print("species")
skipped += 1
continue

species = species_map.get(species_name)
if not species:
species = LiveStockSpecies.objects.create(
name=species_name
)
species_map[species_name] = species

livestocks_to_create.append(
LiveStock(
herd=herd,
species=species,
gender=self.map_gender(row.gender),
birthdate=self.parse_jalali_datetime(row.birthdate),
)
)
processed_ids.append(row.id)

if len(livestocks_to_create) >= BATCH_SIZE:
print("-----------------------------CREATE------------------------------------")
print(livestocks_to_create)
LiveStock.objects.bulk_create(
livestocks_to_create,
batch_size=BATCH_SIZE
)
created_count += len(livestocks_to_create)
livestocks_to_create.clear()
break

# flush remaining
if livestocks_to_create:
LiveStock.objects.bulk_create(
livestocks_to_create,
batch_size=BATCH_SIZE
)
created_count += len(livestocks_to_create)

# mark excel rows as archived
# ExcelLiveStocks.objects.filter(
# id__in=processed_ids
# ).update(archive=True)

self.stdout.write(self.style.SUCCESS(
f"Import finished. Created: {created_count}, Skipped: {skipped}"
))

@staticmethod
def map_gender(value):
if not value:
return 1
value = value.strip().lower()
if value in ['female', 'f', 'ماده']:
return 2
return 1

@staticmethod
def parse_date(value):
if not value:
return None
try:
return datetime.strptime(value, '%Y/%m/%d')
except Exception:
return None
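Note: parse_jalali_datetime in the command above turns a Jalali (Shamsi) 'YYYY/MM/DD' string into a timezone-aware Gregorian datetime so it can be assigned to LiveStock.birthdate with USE_TZ enabled. A standalone sketch of the same conversion; the sample date is illustrative:

```python
from datetime import datetime

import jdatetime
from django.utils import timezone


def jalali_to_aware(date_str: str):
    """Convert a Jalali 'YYYY/MM/DD' string to an aware datetime, or None."""
    if not date_str:
        return None
    year, month, day = map(int, date_str.split('/'))
    g_date = jdatetime.date(year, month, day).togregorian()  # Jalali -> Gregorian date
    naive_dt = datetime.combine(g_date, datetime.min.time())  # midnight, naive
    return timezone.make_aware(naive_dt)  # interpreted in the project's TIME_ZONE


# e.g. jalali_to_aware('1402/05/14') -> aware datetime for 2023-08-05 00:00:00
```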
apps/livestock/migrations/0017_livestockspecies_value.py (18, Normal file)
@@ -0,0 +1,18 @@
# Generated by Django 5.0 on 2026-01-24 11:28

from django.db import migrations, models


class Migration(migrations.Migration):

dependencies = [
('livestock', '0016_temporarylivestock'),
]

operations = [
migrations.AddField(
model_name='livestockspecies',
name='value',
field=models.IntegerField(default=0),
),
]
@@ -0,0 +1,23 @@
# Generated by Django 5.0 on 2026-01-24 11:32

from django.db import migrations, models


class Migration(migrations.Migration):

dependencies = [
('livestock', '0017_livestockspecies_value'),
]

operations = [
migrations.AddField(
model_name='livestockspecies',
name='en_name',
field=models.CharField(max_length=50, null=True),
),
migrations.AlterField(
model_name='livestockspecies',
name='name',
field=models.CharField(max_length=50, null=True),
),
]

apps/livestock/migrations/0019_excellivestocks.py (38, Normal file)
@@ -0,0 +1,38 @@
# Generated by Django 5.0 on 2026-02-09 10:58

import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models


class Migration(migrations.Migration):

dependencies = [
('livestock', '0018_livestockspecies_en_name_alter_livestockspecies_name'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]

operations = [
migrations.CreateModel(
name='ExcelLiveStocks',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('create_date', models.DateTimeField(auto_now_add=True)),
('modify_date', models.DateTimeField(auto_now=True)),
('creator_info', models.CharField(max_length=100, null=True)),
('modifier_info', models.CharField(max_length=100, null=True)),
('trash', models.BooleanField(default=False)),
('national_id', models.CharField(max_length=250, null=True)),
('herd_code', models.CharField(max_length=150, null=True)),
('species', models.CharField(max_length=250, null=True)),
('birthdate', models.CharField(max_length=150, null=True)),
('gender', models.CharField(max_length=150, null=True)),
('agent_code', models.CharField(max_length=150, null=True)),
('created_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='%(class)s_createddby', to=settings.AUTH_USER_MODEL)),
('modified_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='%(class)s_modifiedby', to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
]
@@ -0,0 +1,18 @@
# Generated by Django 5.0 on 2026-02-10 08:02

from django.db import migrations, models


class Migration(migrations.Migration):

dependencies = [
('livestock', '0019_excellivestocks'),
]

operations = [
migrations.AddField(
model_name='excellivestocks',
name='sync_status',
field=models.CharField(max_length=50, null=True),
),
]
@@ -7,7 +7,9 @@ from apps.tag import models as tag_models

class LiveStockSpecies(BaseModel): # noqa
""" species of live stocks like Kurdi, Luri, etc """ # noqa
name = models.CharField(max_length=50)
name = models.CharField(max_length=50, null=True)
en_name = models.CharField(max_length=50, null=True)
value = models.IntegerField(default=0)

def __str__(self):
return f'{self.name}'
@@ -101,6 +103,16 @@ class LiveStock(BaseModel):
return super(LiveStock, self).save(*args, **kwargs)


class ExcelLiveStocks(BaseModel):
national_id = models.CharField(max_length=250, null=True)
herd_code = models.CharField(max_length=150, null=True)
species = models.CharField(max_length=250, null=True)
birthdate = models.CharField(max_length=150, null=True)
gender = models.CharField(max_length=150, null=True)
agent_code = models.CharField(max_length=150, null=True)
sync_status = models.CharField(max_length=50, null=True)


class TemporaryLiveStock(BaseModel):
rancher = models.ForeignKey(
herd_models.Rancher,

@@ -1,6 +1,7 @@
from rest_framework import serializers
from apps.livestock import models as livestock_models

from apps.herd.web.api.v1.serializers import HerdSerializer
from apps.livestock import models as livestock_models
from apps.tag.web.api.v1.serializers import TagSerializer


@@ -28,7 +29,9 @@ class LiveStockSpeciesSerializer(serializers.ModelSerializer):
model = livestock_models.LiveStockSpecies
fields = [
'id',
'name'
'name',
'en_name',
'value',
]


@@ -2,6 +2,23 @@ from rest_framework import status
from rest_framework.exceptions import APIException


class DeviceException(APIException):
status_code = status.HTTP_400_BAD_REQUEST
default_detail = "خطا در اطلاعات پلاک" # noqa
default_code = 'error'

def __init__(self, message=None, status_code=None, code=None):
if status_code is not None:
self.status_code = status_code

detail = {
"message": message,
"status_code": status_code
}

super().__init__(detail)


class DeviceAlreadyAssigned(APIException):
status_code = status.HTTP_403_FORBIDDEN
default_detail = "این دستگاه قبلا به این کلاینت تخصیص داده شده است" # noqa

apps/pos_device/migrations/0081_alter_device_serial.py (18, Normal file)
@@ -0,0 +1,18 @@
# Generated by Django 5.0 on 2026-01-26 06:47

from django.db import migrations, models


class Migration(migrations.Migration):

dependencies = [
('pos_device', '0080_bankaccountdevicelink'),
]

operations = [
migrations.AlterField(
model_name='device',
name='serial',
field=models.TextField(null=True),
),
]
@@ -36,7 +36,7 @@ class Device(BaseModel):
acceptor = models.CharField(max_length=50, null=True)
terminal = models.CharField(max_length=50, null=True)
mac = models.CharField(max_length=50, null=True)
serial = models.TextField(null=True, unique=True)
serial = models.TextField(null=True)
password = models.CharField(max_length=25, null=True)
multi_device = models.BooleanField(default=False)
server_in = models.BooleanField(default=False)

@@ -3,6 +3,7 @@ from rest_framework.serializers import ModelSerializer
from apps.authentication.api.v1.serializers.serializer import BankAccountSerializer
from apps.pos_device import exceptions as pos_exceptions
from apps.pos_device import models as pos_models
from apps.pos_device.exceptions import DeviceException
from apps.pos_device.web.api.v1.serilaizers import client as client_serializer
from apps.product.web.api.v1.serializers.quota_distribution_serializers import QuotaDistributionSerializer

@@ -18,6 +19,19 @@ class DeviceSerializer(ModelSerializer):
model = pos_models.Device
fields = '__all__'

def validate(self, attrs):
serial = attrs['serial']

if not self.instance:
if self.Meta.model.objects.filter(serial=serial).exists():
raise DeviceException("دستگاه یا این شماره سریال از قبل ثبت شده است.", status_code=403) # noqa

if self.instance:
if serial != self.instance.serial and self.Meta.model.objects.filter(serial=serial).exists():
raise DeviceException("دستگاهی با این شماره سریال وجود دارد.", status_code=403) # noqa

return attrs

def to_representation(self, instance):
""" custom output of serializer """
representation = super().to_representation(instance)
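Note: migration 0081 and the model hunk above drop unique=True from Device.serial, and DeviceSerializer.validate now enforces uniqueness in application code instead, raising DeviceException with status 403 (the Persian messages translate roughly to "a device with this serial number is already registered" / "a device with this serial number exists"). A hedged sketch of the equivalent check, not the project's own helper:

```python
# Sketch of the duplicate-serial check now done in DeviceSerializer.validate
# (the DB-level unique constraint was removed by migration 0081).
from apps.pos_device.models import Device


def serial_taken(serial: str, instance: Device | None = None) -> bool:
    """True if another Device already uses this serial."""
    qs = Device.objects.filter(serial=serial)
    if instance is not None:
        qs = qs.exclude(pk=instance.pk)  # an update may keep its own serial
    return qs.exists()
```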
@@ -4,3 +4,6 @@ from django.apps import AppConfig
class TagConfig(AppConfig):
default_auto_field = 'django.db.models.BigAutoField'
name = 'apps.tag'

def ready(self):
import apps.tag.signals.tag_distribution_signals # noqa

@@ -4,12 +4,12 @@ from django.core.management.base import BaseCommand
from django.db import transaction

from apps.herd.models import Herd
from apps.livestock.models import LiveStock, LiveStockType
from apps.tag.models import Tag, TemporaryTags
from apps.livestock.models import LiveStock, LiveStockType, ExcelLiveStocks
from apps.tag.models import Tag
from common.generics import parse_birthdate

BATCH_SIZE = 5000
CHUNK_SIZE = 10000
BATCH_SIZE = 1000
CHUNK_SIZE = 1000


class Command(BaseCommand):
@@ -22,16 +22,16 @@ class Command(BaseCommand):
)

qs = (
TemporaryTags.objects
ExcelLiveStocks.objects
.filter(sync_status__isnull=True)
.only('herd_code', 'birthdate', 'gender', 'tag')
.only('herd_code', 'birthdate', 'gender', 'national_id')
)

total = qs.count()
processed = 0
start_time = time.time()

LOG_EVERY = 10000
LOG_EVERY = 1000

buffer = []
for temp in qs.iterator(chunk_size=CHUNK_SIZE):
@@ -64,7 +64,7 @@ class Command(BaseCommand):
self.stdout.write(self.style.SUCCESS("DONE ✅"))

def process_batch(self, temps):
herd_codes = {t.herd_code for t in temps if t.herd_code}
herd_codes = {self.normalize_herd_code(t.herd_code) for t in temps if t.herd_code}

herds = {
h.code: h
@@ -90,7 +90,7 @@ class Command(BaseCommand):
existing_tags = {
t.tag_code: t
for t in Tag.objects.filter(
tag_code__in=[t.tag for t in temps if t.tag]
tag_code__in=[t.national_id for t in temps if t.national_id]
)
}

@@ -99,28 +99,28 @@ class Command(BaseCommand):
new_tags = []

for temp in temps:
herd = herds.get(temp.herd_code)
herd = herds.get(self.normalize_herd_code(temp.herd_code))
if not herd:
continue

birthdate = parse_birthdate(temp.birthdate)
gender = 1 if temp.gender == 'M' else 2
livestock_type = livestock_types.get(temp.type)
weight_type = livestock_type.weight_type
livestock_type = livestock_types.get(temp.species)
weight_type = livestock_type.weight_type if livestock_type else 'L'

key = (temp.herd_code, birthdate, gender)
key = (self.normalize_herd_code(temp.herd_code), birthdate, gender)
livestock = livestock_map.get(key)

if not livestock:
if not temp.tag:
if not temp.national_id:
continue

tag = existing_tags.get(temp.tag)
tag = existing_tags.get(temp.national_id)

if not tag:
tag = Tag(tag_code=temp.tag, status='A')
tag = Tag(tag_code=temp.national_id, status='A')
new_tags.append(tag)
existing_tags[temp.tag] = tag
existing_tags[temp.national_id] = tag

livestock = LiveStock(
herd=herd,
@@ -136,13 +136,13 @@ class Command(BaseCommand):
temp.sync_status = 'S'
continue

if livestock.tag is None and temp.tag:
tag = existing_tags.get(temp.tag)
if livestock.tag is None and temp.national_id:
tag = existing_tags.get(temp.national_id)

if not tag:
tag = Tag(tag_code=temp.tag, status='A')
tag = Tag(tag_code=temp.national_id, status='A')
new_tags.append(tag)
existing_tags[temp.tag] = tag
existing_tags[temp.national_id] = tag

livestock.tag = tag
updated_livestock.append(livestock)
@@ -161,8 +161,13 @@ class Command(BaseCommand):
['tag'],
batch_size=BATCH_SIZE
)
TemporaryTags.objects.bulk_update(
ExcelLiveStocks.objects.bulk_update(
temps,
['sync_status'],
batch_size=BATCH_SIZE
)

def normalize_herd_code(self, value, length=10):
if value is None:
return None
return str(value).strip().zfill(length)
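Note: both this sync command and the Excel import command normalize herd codes by stripping whitespace and left-padding to ten digits, so numeric Excel cells that lost their leading zeros still match Herd.code. A standalone sketch:

```python
def normalize_herd_code(value, length=10):
    """Strip and zero-pad a herd code; mirrors the helper used by the commands above."""
    if value is None:
        return None
    return str(value).strip().zfill(length)


assert normalize_herd_code(12345) == "0000012345"
assert normalize_herd_code(" 12345 ") == "0000012345"
assert normalize_herd_code(None) is None
```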
apps/tag/migrations/0030_tagdistribution.py (38, Normal file)
@@ -0,0 +1,38 @@
# Generated by Django 5.0 on 2026-01-18 10:44

import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models


class Migration(migrations.Migration):

dependencies = [
('authentication', '0060_organization_ownership_code'),
('tag', '0029_alter_tag_tag_code_tagbatch'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]

operations = [
migrations.CreateModel(
name='TagDistribution',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('create_date', models.DateTimeField(auto_now_add=True)),
('modify_date', models.DateTimeField(auto_now=True)),
('creator_info', models.CharField(max_length=100, null=True)),
('modifier_info', models.CharField(max_length=100, null=True)),
('trash', models.BooleanField(default=False)),
('species_code', models.IntegerField(default=0)),
('distributed_number', models.IntegerField(default=0)),
('batch', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='tag_distributions', to='tag.tagbatch')),
('created_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='%(class)s_createddby', to=settings.AUTH_USER_MODEL)),
('modified_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='%(class)s_modifiedby', to=settings.AUTH_USER_MODEL)),
('organization', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='tag_distributions', to='authentication.organization')),
('tag', models.ManyToManyField(related_name='distributions', to='tag.tag')),
],
options={
'abstract': False,
},
),
]
@@ -0,0 +1,25 @@
# Generated by Django 5.0 on 2026-01-19 06:19

import django.db.models.deletion
from django.db import migrations, models


class Migration(migrations.Migration):

dependencies = [
('authentication', '0060_organization_ownership_code'),
('tag', '0030_tagdistribution'),
]

operations = [
migrations.AddField(
model_name='tagbatch',
name='batch_identity',
field=models.PositiveBigIntegerField(default=0),
),
migrations.AlterField(
model_name='tagbatch',
name='organization',
field=models.ForeignKey(help_text='creator org of tag batch', null=True, on_delete=django.db.models.deletion.CASCADE, related_name='tag_batches', to='authentication.organization'),
),
]

apps/tag/migrations/0032_remove_tagbatch_batch_identity.py (17, Normal file)
@@ -0,0 +1,17 @@
# Generated by Django 5.0 on 2026-01-19 06:25

from django.db import migrations


class Migration(migrations.Migration):

dependencies = [
('tag', '0031_tagbatch_batch_identity_alter_tagbatch_organization'),
]

operations = [
migrations.RemoveField(
model_name='tagbatch',
name='batch_identity',
),
]

apps/tag/migrations/0033_tagbatch_batch_identity.py (18, Normal file)
@@ -0,0 +1,18 @@
# Generated by Django 5.0 on 2026-01-19 06:34

from django.db import migrations, models


class Migration(migrations.Migration):

dependencies = [
('tag', '0032_remove_tagbatch_batch_identity'),
]

operations = [
migrations.AddField(
model_name='tagbatch',
name='batch_identity',
field=models.CharField(max_length=50, null=True, unique=True),
),
]
@@ -0,0 +1,17 @@
# Generated by Django 5.0 on 2026-01-19 06:41

from django.db import migrations


class Migration(migrations.Migration):

dependencies = [
('tag', '0033_tagbatch_batch_identity'),
]

operations = [
migrations.RemoveField(
model_name='tagdistribution',
name='organization',
),
]

apps/tag/migrations/0035_tagdistribution_assigned_org.py (20, Normal file)
@@ -0,0 +1,20 @@
# Generated by Django 5.0 on 2026-01-19 06:42

import django.db.models.deletion
from django.db import migrations, models


class Migration(migrations.Migration):

dependencies = [
('authentication', '0060_organization_ownership_code'),
('tag', '0034_remove_tagdistribution_organization'),
]

operations = [
migrations.AddField(
model_name='tagdistribution',
name='assigned_org',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='tag_distributions', to='authentication.organization'),
),
]
@@ -0,0 +1,25 @@
# Generated by Django 5.0 on 2026-01-19 06:46

import django.db.models.deletion
from django.db import migrations, models


class Migration(migrations.Migration):

dependencies = [
('authentication', '0060_organization_ownership_code'),
('tag', '0035_tagdistribution_assigned_org'),
]

operations = [
migrations.AddField(
model_name='tagdistribution',
name='assigner_org',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='assigner_tag_dist', to='authentication.organization'),
),
migrations.AlterField(
model_name='tagdistribution',
name='assigned_org',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='assigned_tag_dist', to='authentication.organization'),
),
]
@@ -0,0 +1,54 @@
# Generated by Django 5.0 on 2026-01-20 07:31

import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models


class Migration(migrations.Migration):

dependencies = [
('authentication', '0060_organization_ownership_code'),
('tag', '0036_tagdistribution_assigner_org_and_more'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]

operations = [
migrations.AddField(
model_name='tagdistribution',
name='dist_identity',
field=models.CharField(default='0', max_length=20, null=True, unique=True),
),
migrations.AddField(
model_name='tagdistribution',
name='is_closed',
field=models.BooleanField(default=False),
),
migrations.AlterField(
model_name='tagbatch',
name='tag',
field=models.ManyToManyField(related_name='batches', to='tag.tag'),
),
migrations.CreateModel(
name='TagDistributionBatch',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('create_date', models.DateTimeField(auto_now_add=True)),
('modify_date', models.DateTimeField(auto_now=True)),
('creator_info', models.CharField(max_length=100, null=True)),
('modifier_info', models.CharField(max_length=100, null=True)),
('trash', models.BooleanField(default=False)),
('dist_batch_identity', models.CharField(default='0', max_length=20, null=True, unique=True)),
('total_tag_count', models.IntegerField(default=0)),
('is_closed', models.BooleanField(default=False)),
('assigned_org', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='to_tag_distribution_batch', to='authentication.organization')),
('assigner_org', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='from_tag_distribution_batch', to='authentication.organization')),
('created_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='%(class)s_createddby', to=settings.AUTH_USER_MODEL)),
('distributions', models.ManyToManyField(related_name='tag_distribution_batch', to='tag.tagdistribution')),
('modified_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='%(class)s_modifiedby', to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
]
@@ -0,0 +1,28 @@
# Generated by Django 5.0 on 2026-01-24 05:58

from django.db import migrations, models


class Migration(migrations.Migration):
dependencies = [
('tag', '0037_tagdistribution_dist_identity_and_more'),
]

operations = [
migrations.AddField(
model_name='tagdistributionbatch',
name='distribution_type',
field=models.CharField(choices=[('random', 'RANDOM'), ('batch', 'BATCH')], default='batch', max_length=20,
null=True),
),
migrations.AddField(
model_name='tagdistributionbatch',
name='remaining_tag_count',
field=models.PositiveBigIntegerField(default=0),
),
migrations.AddField(
model_name='tagdistributionbatch',
name='total_distributed_tag_count',
field=models.PositiveBigIntegerField(default=0),
),
]

apps/tag/migrations/0039_tagbatch_total_distributed_tags.py (18, Normal file)
@@ -0,0 +1,18 @@
# Generated by Django 5.0 on 2026-01-24 06:38

from django.db import migrations, models


class Migration(migrations.Migration):

dependencies = [
('tag', '0038_tagdistributionbatch_distribution_type_and_more'),
]

operations = [
migrations.AddField(
model_name='tagbatch',
name='total_distributed_tags',
field=models.PositiveBigIntegerField(default=0),
),
]

apps/tag/migrations/0040_tagdistributionbatch_parent.py (19, Normal file)
@@ -0,0 +1,19 @@
# Generated by Django 5.0 on 2026-01-24 09:13

import django.db.models.deletion
from django.db import migrations, models


class Migration(migrations.Migration):

dependencies = [
('tag', '0039_tagbatch_total_distributed_tags'),
]

operations = [
migrations.AddField(
model_name='tagdistributionbatch',
name='parent',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='children', to='tag.tagdistributionbatch'),
),
]

apps/tag/migrations/0041_tagbatch_total_remaining_tags.py (18, Normal file)
@@ -0,0 +1,18 @@
# Generated by Django 5.0 on 2026-01-24 10:31

from django.db import migrations, models


class Migration(migrations.Migration):

dependencies = [
('tag', '0040_tagdistributionbatch_parent'),
]

operations = [
migrations.AddField(
model_name='tagbatch',
name='total_remaining_tags',
field=models.PositiveBigIntegerField(default=0),
),
]

apps/tag/migrations/0042_tagdistribution_parent_and_more.py (29, Normal file)
@@ -0,0 +1,29 @@
# Generated by Django 5.0 on 2026-01-27 09:18

import django.db.models.deletion
from django.db import migrations, models


class Migration(migrations.Migration):

dependencies = [
('tag', '0041_tagbatch_total_remaining_tags'),
]

operations = [
migrations.AddField(
model_name='tagdistribution',
name='parent',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='child', to='tag.tagdistribution'),
),
migrations.AddField(
model_name='tagdistribution',
name='remaining_number',
field=models.IntegerField(default=0),
),
migrations.AddField(
model_name='tagdistribution',
name='total_tag_count',
field=models.IntegerField(default=0),
),
]
@@ -0,0 +1,25 @@
# Generated by Django 5.0 on 2026-02-07 07:29

import django.db.models.deletion
from django.db import migrations, models


class Migration(migrations.Migration):

dependencies = [
('authentication', '0060_organization_ownership_code'),
('tag', '0042_tagdistribution_parent_and_more'),
]

operations = [
migrations.AddField(
model_name='tagdistributionbatch',
name='owner_org',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='tag_distribution_batch', to='authentication.organization'),
),
migrations.AddField(
model_name='tagdistributionbatch',
name='top_root_distribution',
field=models.BooleanField(default=False),
),
]
@@ -0,0 +1,23 @@
# Generated by Django 5.0 on 2026-02-08 07:27

from django.db import migrations, models


class Migration(migrations.Migration):

dependencies = [
('tag', '0043_tagdistributionbatch_owner_org_and_more'),
]

operations = [
migrations.AddField(
model_name='tagdistributionbatch',
name='exit_doc_status',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='tagdistributionbatch',
name='warehouse_exit_doc',
field=models.CharField(max_length=350, null=True),
),
]
@@ -47,11 +47,17 @@ class Tag(BaseModel):


class TagBatch(BaseModel):
batch_identity = models.CharField(
unique=True,
max_length=50,
null=True
)
organization = models.ForeignKey(
Organization,
on_delete=models.CASCADE,
related_name='tag_batches',
null=True
null=True,
help_text="creator org of tag batch"
)
request_number = models.CharField(
max_length=50,
@@ -59,10 +65,12 @@ class TagBatch(BaseModel):
null=True
)

tag = models.ManyToManyField(Tag, related_name='tags')
tag = models.ManyToManyField(Tag, related_name='batches')
species_code = models.IntegerField(default=0)
serial_from = models.PositiveBigIntegerField(default=0)
serial_to = models.PositiveBigIntegerField(default=0)
total_distributed_tags = models.PositiveBigIntegerField(default=0)
total_remaining_tags = models.PositiveBigIntegerField(default=0)

status = models.CharField(
max_length=20,
@@ -83,29 +91,96 @@ class TagBatch(BaseModel):


class TagDistribution(BaseModel):
parent = models.ForeignKey(
'self',
on_delete=models.CASCADE,
related_name='child',
null=True
)
dist_identity = models.CharField(max_length=20, default="0", unique=True, null=True)
batch = models.ForeignKey(
TagBatch,
on_delete=models.CASCADE,
related_name='distributions',
related_name='tag_distributions',
null=True
)
tag = models.ManyToManyField(Tag, related_name='distributions')
organization = models.ForeignKey(
assigner_org = models.ForeignKey(
Organization,
on_delete=models.CASCADE,
related_name='distributions',
related_name='assigner_tag_dist',
null=True
)
assigned_org = models.ForeignKey(
Organization,
on_delete=models.CASCADE,
related_name='assigned_tag_dist',
null=True
)
species_code = models.IntegerField(default=0)
total_tag_count = models.IntegerField(default=0)
distributed_number = models.IntegerField(default=0)
remaining_number = models.IntegerField(default=0)
is_closed = models.BooleanField(default=False)

def __str__(self):
return f'{self.id}-{self.distributed_number}-{self.organization.name}'
return f'{self.id}-{self.distributed_number}-{self.assigned_org.name}'

def save(self, *args, **kwargs):
return super(TagDistribution, self).save(*args, **kwargs)


class TagDistributionBatch(BaseModel):
parent = models.ForeignKey(
'self',
on_delete=models.CASCADE,
related_name='children',
null=True
)
dist_batch_identity = models.CharField(max_length=20, default="0", unique=True, null=True)
owner_org = models.ForeignKey(
Organization,
on_delete=models.CASCADE,
related_name='tag_distribution_batch',
null=True
)
assigner_org = models.ForeignKey(
Organization,
on_delete=models.CASCADE,
related_name='from_tag_distribution_batch',
null=True
)
assigned_org = models.ForeignKey(
Organization,
on_delete=models.CASCADE,
related_name="to_tag_distribution_batch",
null=True
)
distribution_type = models.CharField(
choices=(
('random', 'RANDOM'),
('batch', 'BATCH'),
),
max_length=20,
null=True,
default='batch'
)
distributions = models.ManyToManyField(TagDistribution, related_name='tag_distribution_batch')
total_tag_count = models.IntegerField(default=0)
total_distributed_tag_count = models.PositiveBigIntegerField(default=0)
remaining_tag_count = models.PositiveBigIntegerField(default=0)
top_root_distribution = models.BooleanField(default=False)
warehouse_exit_doc = models.CharField(max_length=350, null=True)
exit_doc_status = models.BooleanField(default=False)
is_closed = models.BooleanField(default=False)

def __str__(self):
return f'{self.id}'

def save(self, *args, **kwargs):
return super(TagDistributionBatch, self).save(*args, **kwargs)


class TagAssignment(BaseModel):
organization = models.ForeignKey(
auth_models.Organization,

apps/tag/services/tag_batch_service.py (69, Normal file)
@@ -0,0 +1,69 @@
from django.db.models import Sum, Q, Count, QuerySet, OuterRef, Subquery, IntegerField
from django.db.models.functions import Coalesce

from apps.authentication.models import Organization
from apps.authentication.services.service import get_all_org_child
from apps.tag.models import TagBatch


class TagBatchService:
"""
services of tag batch
"""

def tag_batch_main_dashboard(self, org: Organization = None, batch: QuerySet[TagBatch] = None):
"""
dashboard data of batch main page
"""

qs = TagBatch.objects.select_related('organization') if not batch else batch

if org.type.key != 'ADM':
child_orgs = get_all_org_child(org) # noqa
child_orgs.append(org)
qs = qs.filter(organization__in=child_orgs)

base_data = qs.aggregate(
batch_count=Count('id', distinct=True),
total_distributed_tags=Coalesce(Sum('total_distributed_tags'), 0),
total_remaining_tags=Coalesce(Sum('total_remaining_tags'), 0),
has_distributed_batches_number=Count(
'id',
distinct=True,
filter=Q(status__in=[
'distributed',
])
)
)
base_data.update(qs.aggregate(tag_count_created_by_batch=Count('tag')))

tag_count_subquery = (
TagBatch.objects
.filter(id=OuterRef('id'))
.annotate(cnt=Count('tag'))
.values('cnt')
)

species_data = (
qs
.annotate(
tag_count=Subquery(tag_count_subquery, output_field=IntegerField())
)
.values('species_code')
.annotate(
batch_count=Count('id', distinct=True),
total_distributed_tags=Coalesce(Sum('total_distributed_tags'), 0),
total_remaining_tags=Coalesce(Sum('total_remaining_tags'), 0),
tag_count_created_by_batch=Coalesce(Sum('tag_count'), 0),
has_distributed_batches_number=Count(
'id',
distinct=True,
filter=Q(status='distributed')
)
)
.order_by('species_code')
)

base_data['batch_data_by_species'] = list(species_data)

return base_data
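Note: tag_batch_main_dashboard returns a single dict with overall TagBatch counters plus a per-species breakdown under batch_data_by_species; for non-ADM organizations the queryset is first limited to the organization and its children. A hedged usage sketch; the organization lookup is illustrative:

```python
# Hedged usage sketch for TagBatchService; the org id is a placeholder.
from apps.authentication.models import Organization
from apps.tag.services.tag_batch_service import TagBatchService

org = Organization.objects.get(id=1)
data = TagBatchService().tag_batch_main_dashboard(org=org)

# Keys produced by the aggregate() calls above:
#   batch_count, total_distributed_tags, total_remaining_tags,
#   has_distributed_batches_number, tag_count_created_by_batch,
#   batch_data_by_species  (list of per-species_code dicts with the same counters)
print(data["batch_count"], len(data["batch_data_by_species"]))
```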
377
apps/tag/services/tag_distribution_services.py
Normal file
377
apps/tag/services/tag_distribution_services.py
Normal file
@@ -0,0 +1,377 @@
|
||||
import random
|
||||
|
||||
from django.db import transaction
|
||||
from django.db.models import Sum, Q
|
||||
from django.db.models.aggregates import Count
|
||||
from django.db.models.functions import Coalesce
|
||||
from rest_framework.exceptions import PermissionDenied
|
||||
|
||||
from apps.authentication.models import Organization
|
||||
from apps.livestock.models import LiveStockSpecies
|
||||
from apps.tag.exceptions import TagException
|
||||
from apps.tag.models import Tag, TagBatch, TagDistribution, TagDistributionBatch
|
||||
from common.generics import generate_unique_code
|
||||
|
||||
|
||||
class TagDistributionService:
|
||||
"""
|
||||
service of distribute tags in organizations
|
||||
"""
|
||||
|
||||
def create_distribution_from_batch(self, org: Organization = None, data: dict = None):
|
||||
"""
|
||||
distribute tags with batch
|
||||
"""
|
||||
with transaction.atomic():
|
||||
distributions = []
|
||||
total_counted_tags = 0
|
||||
assigned_org = Organization.objects.get(id=data.get('assigned_org'))
|
||||
|
||||
for distribution in data.get('dists'):
|
||||
batch_identity = distribution.get('batch_identity', None)
|
||||
# if batch identity exists distribute tags of batch
|
||||
if batch_identity:
|
||||
batch = TagBatch.objects.get(batch_identity=batch_identity)
|
||||
tags = Tag.objects.filter(
|
||||
batches__batch_identity=batch_identity,
|
||||
species_code=distribution.get('species_code'),
|
||||
status='F'
|
||||
)
|
||||
distribution_type = 'batch'
|
||||
else:
|
||||
batch = None
|
||||
# get tags without batch and only with species code
|
||||
tags = Tag.objects.filter(
|
||||
species_code=distribution.get('species_code'),
|
||||
status='F'
|
||||
)
|
||||
distribution_type = 'random'
|
||||
|
||||
if tags.count() < distribution.get('count'):
|
||||
raise TagException(
|
||||
"تعداد وارد شده از تعداد موجودی این گونه بیشتر میباشد.", # noqa
|
||||
403
|
||||
)
|
||||
|
||||
dist = TagDistribution.objects.create(
|
||||
batch=batch,
|
||||
assigner_org=org,
|
||||
assigned_org=assigned_org,
|
||||
species_code=distribution.get('species_code'),
|
||||
total_tag_count=distribution.get('count'),
|
||||
remaining_number=distribution.get('count'),
|
||||
dist_identity=generate_unique_code(f"{random.randint(1000, 9999)}"),
|
||||
)
|
||||
|
||||
# get counted tag ids and filter by them to update status To Reserve
|
||||
counted_tags_obj = tags.order_by('create_date')[:int(distribution.get('count'))]
|
||||
counted_tag_ids = [tag.id for tag in counted_tags_obj]
|
||||
tags.filter(id__in=counted_tag_ids).update(status='R')
|
||||
|
||||
dist.tag.add(*counted_tags_obj)
|
||||
distributions.append(dist)
|
||||
|
||||
total_counted_tags += distribution.get('count')
|
||||
|
||||
# create distribution batch
|
||||
distributions_batch = TagDistributionBatch.objects.create(
|
||||
parent=TagDistributionBatch.objects.get(id=data.get('parent')) if data.get('parent') else None,
|
||||
owner_org=assigned_org,
|
||||
assigner_org=org,
|
||||
assigned_org=assigned_org,
|
||||
total_tag_count=total_counted_tags,
|
||||
dist_batch_identity=generate_unique_code(f"{random.randint(1000, 9999)}"),
|
||||
distribution_type=distribution_type,
|
||||
top_root_distribution=True
|
||||
)
|
||||
distributions_batch.distributions.add(*distributions)
|
||||
|
||||
return {'tag_distributions': distributions, 'distributions_batch': distributions_batch}
|
||||
|
||||
def edit_distribution_from_batch(
self, dist_batch: TagDistributionBatch = None,
data: dict = None,
org: Organization = None
):
"""
edit record of distributed tags
"""

# clear and hard delete of distributions
dist_batch_distributions = dist_batch.distributions.all()
for dist in dist_batch_distributions:  # free distributed tags from reserve
dist.tag.all().update(status='F')
dist_batch_distributions.delete()

# create new distributions and update batch
total_counted_tags = 0
distributions = []
assigned_org = Organization.objects.get(id=data.get('assigned_org'))
for distribution in data.get('dists'):
batch_identity = distribution.get('batch_identity', None)
# if batch identity exists distribute tags of batch
if batch_identity:
batch = TagBatch.objects.get(batch_identity=batch_identity)
tags = Tag.objects.filter(
batches__batch_identity=batch_identity,
species_code=distribution.get('species_code'),
status='F'
)
distribution_type = 'batch'
else:
batch = None
# get tags without batch and only with species code
tags = Tag.objects.filter(
species_code=distribution.get('species_code'),
status='F'
)
distribution_type = 'random'

if tags.count() < distribution.get('count'):
raise TagException(
"تعداد وارد شده از تعداد موجودی این گونه بیشتر میباشد.", # noqa
403
)

dist = TagDistribution.objects.create(
batch=batch,
assigner_org=org,
assigned_org=assigned_org,
species_code=distribution.get('species_code'),
total_tag_count=distribution.get('count'),
remaining_number=distribution.get('count'),
dist_identity=generate_unique_code(f"{random.randint(1000, 9999)}"),
)

# get counted tag ids and filter by them to update status To Reserve
counted_tags_obj = tags.order_by('create_date')[:int(distribution.get('count'))]
counted_tag_ids = [tag.id for tag in counted_tags_obj]
tags.filter(id__in=counted_tag_ids).update(status='R')

dist.tag.add(*counted_tags_obj)
distributions.append(dist)

total_counted_tags += distribution.get('count')

# update distribution batch
dist_batch.assigned_org = assigned_org
dist_batch.total_tag_count = total_counted_tags
dist_batch.distribution_type = distribution_type  # noqa
dist_batch.save(update_fields=['assigned_org', 'total_tag_count', 'distribution_type'])
dist_batch.distributions.add(*distributions)

return {'tag_distributions': distributions, 'distributions_batch': dist_batch}

def create_distribution_from_distribution(
self, org: Organization = None,
tag_batch: TagDistributionBatch = None,
data: dict = None
):
"""
create a distribution from distribution to target organization
"""
with transaction.atomic():
distributions = []
total_counted_tags = 0

assigned_org = Organization.objects.get(id=data['assigned_org'])
parent_batch = TagDistributionBatch.objects.get(
id=data['parent_distribution_batch']
)

if parent_batch.assigned_org != org and org.type.key != 'ADM':
raise PermissionDenied("دسترسی غیرمجاز") # noqa

for dist_data in data['dists']:
species = dist_data['species_code']
count = dist_data['count']
parent_tag_distribution = TagDistribution.objects.get(
id=dist_data['parent_tag_distribution']
)
batch = TagBatch.objects.get(
batch_identity=dist_data.get('batch_identity')
) if dist_data.get('batch_identity') else None

tags = Tag.objects.filter(
distributions__tag_distribution_batch=parent_batch,
species_code=species,
status='R',
)

if tags.count() < count:
raise TagException("پلاک کافی برای این گونه وجود ندارد", 403) # noqa

dist = TagDistribution.objects.create(
parent=parent_tag_distribution,
batch=batch,
assigner_org=org,
assigned_org=assigned_org,
species_code=species,
total_tag_count=count,
remaining_number=count,
dist_identity=generate_unique_code(
f"{random.randint(1000, 9999)}"
),
)

selected_tags = tags.order_by('create_date')[:count]
dist.tag.add(*selected_tags)

distributions.append(dist)
total_counted_tags += count

dist_batch = TagDistributionBatch.objects.create(
parent=parent_batch,
owner_org=assigned_org,
assigner_org=org,
assigned_org=assigned_org,
total_tag_count=total_counted_tags,
distribution_type=parent_batch.distribution_type,
dist_batch_identity=generate_unique_code(
f"{random.randint(1000, 9999)}"
)
)

dist_batch.distributions.add(*distributions)

return {
'tag_distributions': distributions,
'distributions_batch': dist_batch
}

def edit_distribution_from_distribution(
self, org: Organization = None,
tag_batch: TagDistributionBatch = None,
data: dict = None
):
with transaction.atomic():

if tag_batch.assigner_org != org:
raise PermissionDenied("اجازه ویرایش این توزیع را ندارید") # noqa

for dist in tag_batch.distributions.all():
dist.tag.all().update(
status='R',
organization=org
)

old_distributions = tag_batch.distributions.all()
tag_batch.distributions.clear()
old_distributions.delete()

assigned_org = Organization.objects.get(id=data['assigned_org'])
parent_batch = tag_batch.parent

distributions = []
total_counted_tags = 0

for dist_data in data['dists']:
species = dist_data['species_code']
parent_tag_distribution = TagDistribution.objects.get(
id=dist_data['parent_tag_distribution']
)
batch = TagBatch.objects.get(
batch_identity=dist_data.get('batch_identity')
) if dist_data.get('batch_identity') else None
count = dist_data['count']

tags = Tag.objects.filter(
distributions__tag_distribution_batch=parent_batch,
species_code=species,
status='R',
)

if tags.count() < count:
raise TagException(
"پلاک کافی برای این گونه وجود ندارد", # noqa
403
)

dist = TagDistribution.objects.create(
parent=parent_tag_distribution,
batch=batch,
assigner_org=org,
assigned_org=assigned_org,
species_code=species,
total_tag_count=count,
remaining_number=count,
dist_identity=generate_unique_code(
f"{random.randint(1000, 9999)}"
),
)

selected_tags = tags.order_by('create_date')[:count]
dist.tag.add(*selected_tags)

distributions.append(dist)
total_counted_tags += count

# update distribution batch
tag_batch.assigned_org = assigned_org
tag_batch.total_tag_count = total_counted_tags
tag_batch.is_closed = False
tag_batch.save(update_fields=[
'assigned_org',
'total_tag_count',
'is_closed'
])

tag_batch.distributions.add(*distributions)

return {
'tag_distributions': distributions,
'distributions_batch': tag_batch
}

def distribution_batch_main_dashboard(self, org: Organization, is_closed: str = 'false'):
"""
distribution batch main page dashboard detail
"""

is_closed = is_closed != 'false'  # query param arrives as a string

if org.type.key == 'ADM':
distribution_query = Q(is_closed=is_closed)
else:
distribution_query = (
Q(assigner_org=org) |
Q(assigned_org=org)
) & Q(is_closed=is_closed)

distributions_batch = TagDistributionBatch.objects.prefetch_related(
'distributions'
).filter(distribution_query)

data = distributions_batch.aggregate(
count=Count('id'),
total_sent_tag_count=Coalesce(Sum('total_tag_count', filter=Q(assigner_org=org)), 0),
total_recieved_tag_count=Coalesce(Sum('total_tag_count', filter=Q(assigned_org=org)), 0),
total_recieved_distributions=Count('id', filter=Q(assigned_org=org)),
total_sent_distributions=Count('id', filter=Q(assigner_org=org)),
total_distributed_tag_count=Sum('total_distributed_tag_count'),
remaining_tag_count=Sum('remaining_tag_count'),
)

# distributions item list detail
items_list = []
distributions = TagDistribution.objects.filter(
distribution_query
)

species = LiveStockSpecies.objects.values('value')

for spec in species:
dist_data = distributions.aggregate(
dist_count=Count('id', filter=Q(species_code=spec.get('value'))),
tag_count=Coalesce(
Sum('distributed_number', filter=Q(species_code=spec.get('value'))), 0
)
)
dist_data.update({'species_code': spec.get('value')})  # add species code to data
items_list.append(dist_data)

data.update({'items': items_list})

return data
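For reference, a sketch of the payload shape this method returns; the keys are taken from the aggregates above, the values are illustrative only:

# {
#     'count': 4,
#     'total_sent_tag_count': 1200,
#     'total_recieved_tag_count': 300,
#     'total_recieved_distributions': 1,
#     'total_sent_distributions': 3,
#     'total_distributed_tag_count': 900,
#     'remaining_tag_count': 600,
#     'items': [{'dist_count': 2, 'tag_count': 500, 'species_code': 1}, ...]
# }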
@@ -1,3 +1,5 @@
import random

from django.db.models import Q
from django.db.models.aggregates import Count

@@ -7,6 +9,14 @@ from apps.tag.exceptions import TagException
from apps.tag.models import Tag, TagBatch
from apps.tag.tools import tag_code_serial_scanning

SPECIES_MAP = {
'cow': 1,
'buffalo': 2,
'camel': 3,
'sheep': 4,
'goat': 5,
}


class TagService:
"""
@@ -44,12 +54,15 @@ class TagService:

# create tag batch
request_number = (serial_end_range - serial_start_range) + 1
batch_identity = f'{serial_start_range}{serial_end_range}{data.get("species_code")}{random.randint(1000, 9999)}'
batch = TagBatch.objects.create(
batch_identity=batch_identity,
organization=org,
request_number=request_number if request_number > 0 else 1,
species_code=data.get('species_code'),
serial_from=serial_start_range,
serial_to=serial_end_range,
total_remaining_tags=request_number if request_number > 0 else 1,
status='created',
)

@@ -92,6 +105,7 @@ class TagService:
batch.species_code = data.get('species_code')
batch.serial_from = serial_start_range
batch.serial_to = serial_end_range
batch.total_remaining_tags = request_number
batch.save(update_fields=['request_number', 'species_code', 'serial_from', 'serial_to', 'total_remaining_tags'])

# recreate tags for batch
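A quick illustration of the identifiers built above, using assumed inputs; the format follows the f-string and SPECIES_MAP literally:

# e.g. serial_range [500, 550] for species_code SPECIES_MAP['sheep'] == 4:
# request_number = (550 - 500) + 1                               -> 51
# batch_identity = f'{500}{550}{4}{random.randint(1000, 9999)}'  -> '50055047321' (last 4 digits random)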
0
apps/tag/signals/__init__.py
Normal file
75
apps/tag/signals/tag_distribution_signals.py
Normal file
@@ -0,0 +1,75 @@
from django.db.models import Sum
from django.db.models.functions import Coalesce
from django.db.models.signals import m2m_changed
from django.db.models.signals import post_save
from django.dispatch import receiver

from apps.tag.models import TagDistribution, TagDistributionBatch, Tag


@receiver(m2m_changed, sender=TagDistribution.tag.through)
def update_batch_on_distribution_change(
sender, instance: TagDistribution, action, **kwargs
):
if action not in ['post_add', 'post_remove', 'post_clear']:
return

if not instance.batch:
return

if instance.parent:
return

batch = instance.batch

distributions = TagDistribution.objects.filter(batch=batch)

distributed_tags = Tag.objects.filter(
distributions__batch=batch,
status__in=['R', 'A'],
).distinct().count()

print("distributed_tags", distributed_tags)
batch.total_distributed_tags = distributed_tags
batch.total_remaining_tags = (
int(batch.request_number) - distributed_tags
)

batch.status = (
'distributed'
if batch.total_remaining_tags == 0
else 'created'
)

batch.save(update_fields=[
'total_distributed_tags',
'total_remaining_tags',
'status'
])


@receiver(post_save, sender=TagDistributionBatch)
def calculate_tag_distribution_detail(sender, instance: TagDistributionBatch, **kwargs):
"""
calculate distribution & remaining distributed tags
"""

if getattr(instance, 'flag', False):
return

tag_dist_batch = instance
parent = tag_dist_batch.parent
if parent:
parent.total_distributed_tag_count = parent.children.aggregate(
total=Coalesce(Sum('total_tag_count'), 0)
)['total']
parent.remaining_tag_count = (
parent.total_tag_count - parent.total_distributed_tag_count
)
parent.parent_flag = True
parent.save(update_fields=['remaining_tag_count', 'total_distributed_tag_count'])

if not getattr(instance, 'parent_flag', False):
tag_dist_batch.remaining_tag_count = tag_dist_batch.total_tag_count
instance.flag = True
tag_dist_batch.save(update_fields=['remaining_tag_count'])
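Reviewer note: the `flag` / `parent_flag` attributes in the post_save handler are transient, per-instance guards that keep the handler from re-running its own logic when it calls save() inside the signal. A minimal sketch of the pattern, with names mirroring the handler above:

# instance.flag = True             # set just before the save() issued inside the handler
# instance.save(update_fields=[...])   # post_save fires again for the same instance ...
# if getattr(instance, 'flag', False):
#     return                       # ... and the re-entrant call exits immediately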
92
apps/tag/templates/pdf/tag_distribution.html
Normal file
@@ -0,0 +1,92 @@
<!DOCTYPE html>
<html lang="fa" dir="rtl">
<head>
<meta charset="UTF-8">
<style>
@page {
size: A4;
margin: 2cm;
}

body {
font-family: DejaVu Sans;
font-size: 12px;
}

h1 {
text-align: center;
margin-bottom: 20px;
}

.meta {
margin-bottom: 20px;
}

.meta div {
margin-bottom: 5px;
}

table {
width: 100%;
border-collapse: collapse;
}

th, td {
border: 1px solid #333;
padding: 6px;
text-align: center;
}

th {
background: #f0f0f0;
}

.footer {
margin-top: 30px;
font-size: 10px;
text-align: center;
color: #666;
}
</style>
</head>
<body>

<h1>سند توزیع پلاک دام</h1>

<div class="meta">
<div><strong>شناسه توزیع:</strong> {{ batch.dist_batch_identity }}</div>
<div><strong>سازمان تخصیصدهنده:</strong> {{ batch.assigner_org.name }}</div>
<div><strong>سازمان دریافتکننده:</strong> {{ batch.assigned_org.name }}</div>
<div><strong>تاریخ ایجاد:</strong> {{ batch.create_date }}</div>
<div><strong>تعداد کل پلاک:</strong> {{ batch.total_tag_count }}</div>
</div>

<table>
<thead>
<tr>
<th>کد گونه</th>
<th>از سریال</th>
<th>تا سریال</th>
<th>تعداد کل</th>
<th>باقیمانده</th>
</tr>
</thead>
<tbody>
{% for dist in batch.distributions.all %}
<tr>
<td>{{ dist.species_code }}</td>
<td>{{ dist.serial_from }}</td>
<td>{{ dist.serial_to }}</td>
<td>{{ dist.total_tag_count }}</td>
<td>{{ dist.remaining_number }}</td>
</tr>
{% endfor %}
</tbody>
</table>

<div class="footer">
این سند به صورت سیستمی تولید شده و معتبر میباشد.
</div>

</body>
</html>
@@ -1,3 +1,4 @@
import random
import typing

from django.db import transaction
@@ -16,16 +17,22 @@ from apps.core.mixins.soft_delete_mixin import SoftDeleteMixin
from apps.tag import exceptions as tag_exceptions
from apps.tag import models as tag_models
from apps.tag.models import TagBatch
from apps.tag.services.tag_batch_service import TagBatchService
from apps.tag.services.tag_distribution_services import TagDistributionService
from apps.tag.services.tag_services import TagService
from common.helpers import get_organization_by_user
from common.liara_tools import upload_to_liara
from common.storage import upload_to_storage
from .serializers import (
TagSerializer,
TagAssignmentSerializer,
AllocatedTagsSerializer, TagBatchSerializer
AllocatedTagsSerializer, TagBatchSerializer, TagDistributionSerializer, TagDistributionBatchSerializer
)


# from weasyprint import HTML


class TagViewSet(BaseViewSet, TagService, SoftDeleteMixin, DynamicSearchMixin, viewsets.ModelViewSet):
""" Tag View Set """
queryset = tag_models.Tag.objects.all()
@@ -33,6 +40,7 @@ class TagViewSet(BaseViewSet, TagService, SoftDeleteMixin, DynamicSearchMixin, v
filter_backends = [SearchFilter]
search_fields = [
'serial',
'status',
'tag_code',
'organization__name',
'organization__type__key',
@@ -68,7 +76,6 @@ class TagViewSet(BaseViewSet, TagService, SoftDeleteMixin, DynamicSearchMixin, v

org = get_organization_by_user(request.user)  # noqa
serial_start_range, serial_end_range = request.data.pop('serial_range')  # serial_range is like [500, 550]
print(serial_start_range, serial_end_range)
data = request.data.copy()

# create tag & batch
@@ -115,6 +122,25 @@ class TagViewSet(BaseViewSet, TagService, SoftDeleteMixin, DynamicSearchMixin, v
response = self.tag_detail(by_id=pk)
return Response(response)

@action(
methods=['get'],
detail=True,
url_name='tags_by_batch',
url_path='tags_by_batch',
name='tags_by_batch',
)
def get_tags_by_batch_id(self, request, pk=None):
"""
get tags by batch id
"""
tags = self.queryset.filter(batches__id=pk)

page = self.paginate_queryset(tags)
if page is not None:  # noqa
serializer = self.get_serializer(page, many=True)
return self.get_paginated_response(serializer.data)
return Response(self.serializer_class(tags, many=True).data)

@action(
methods=['get'],
detail=False,
@@ -276,8 +302,8 @@ class TagAssignmentViewSet(BaseViewSet, SoftDeleteMixin, DynamicSearchMixin, vie
check_response = GeneralOTPViewSet().check_otp(request)
if check_response.status_code == 200:
return Response(check_response.status_code, status=status.HTTP_200_OK)
else:
return Response(check_response.status_code, status=status.HTTP_403_FORBIDDEN)
return Response(check_response.status_code, status=status.HTTP_403_FORBIDDEN)
return Response(status=status.HTTP_200_OK)


class AllocatedTagsViewSet(SoftDeleteMixin, viewsets.ModelViewSet):
@@ -285,7 +311,7 @@ class AllocatedTagsViewSet(SoftDeleteMixin, viewsets.ModelViewSet):
serializer_class = AllocatedTagsSerializer


class TagBatchViewSet(BaseViewSet, SoftDeleteMixin, DynamicSearchMixin, viewsets.ModelViewSet):
class TagBatchViewSet(BaseViewSet, SoftDeleteMixin, DynamicSearchMixin, TagBatchService, viewsets.ModelViewSet):
queryset = TagBatch.objects.all()
serializer_class = TagBatchSerializer
filter_backends = [SearchFilter]
@@ -303,6 +329,11 @@ class TagBatchViewSet(BaseViewSet, SoftDeleteMixin, DynamicSearchMixin, viewsets

queryset = self.get_queryset(visibility_by_org_scope=True).order_by('-create_date')

params = self.request.query_params  # noqa

if params.get('species_code'):
queryset = queryset.filter(species_code=int(params.get('species_code')))

# filter queryset
queryset = self.filter_query(self.filter_queryset(queryset))

@@ -312,6 +343,44 @@ class TagBatchViewSet(BaseViewSet, SoftDeleteMixin, DynamicSearchMixin, viewsets
return self.get_paginated_response(serializer.data)
return Response(self.serializer_class(queryset).data)

@action(
methods=['get'],
detail=False,
url_name='main_dashboard',
url_path='main_dashboard',
name='main_dashboard',
)
def main_dashboard(self, request):
"""
dashboard of tag batches main page
"""
org = get_organization_by_user(request.user)

dashboard_data = self.tag_batch_main_dashboard(org=org)

return Response(dashboard_data, status=status.HTTP_200_OK)

@action(
methods=['get'],
detail=True,
url_name='inner_dashboard',
url_path='inner_dashboard',
name='inner_dashboard',
)
def inner_dashboard(self, request, pk=None):
"""
dashboard of tag batches inner page by id
"""

org = get_organization_by_user(request.user)

dashboard_data = self.tag_batch_main_dashboard(
org=org,
batch=self.queryset.filter(id=self.get_object().id)
)

return Response(dashboard_data, status=status.HTTP_200_OK)

def destroy(self, request, pk=None, *args, **kwargs):
"""
soft delete batch with tag items
@@ -323,3 +392,413 @@ class TagBatchViewSet(BaseViewSet, SoftDeleteMixin, DynamicSearchMixin, viewsets
tag.soft_delete()

return Response(status=status.HTTP_200_OK)

class TagDistributionViewSet(
BaseViewSet,
SoftDeleteMixin,
DynamicSearchMixin,
viewsets.ModelViewSet,
TagDistributionService
):
queryset = tag_models.TagDistribution.objects.all()
serializer_class = TagDistributionSerializer
filter_backends = [SearchFilter]
search_fields = [
'batch__batch_identity',
'tag__tag_code',
'assigner_org__name',
'assigned_org__name',
'species_code',
]

def list(self, request, *args, **kwargs):
"""
list of tag distributions
"""
queryset = self.get_queryset(visibility_by_org_scope=True).filter(is_closed=False).order_by('-create_date')

queryset = self.filter_queryset(self.filter_query(queryset))

page = self.paginate_queryset(queryset)
if page is not None:  # noqa
serializer = self.get_serializer(page, many=True)
return self.get_paginated_response(serializer.data)
return Response(self.serializer_class(queryset, many=True).data)

def create(self, request, *args, **kwargs):
"""
create tag distributions with batch or without batch in random
"""
org = get_organization_by_user(request.user)
data = request.data.copy()

distribution_data = self.create_distribution_from_batch(
org=org,
data=data
)

serializer = self.serializer_class(distribution_data.get('tag_distributions'), many=True)
return Response(serializer.data, status=status.HTTP_200_OK)

def update(self, request, pk=None, *args, **kwargs):
"""
edit tag distribution with/without batch in random
"""

org = get_organization_by_user(request.user)
data = request.data.copy()
dist_batch = tag_models.TagDistributionBatch.objects.get(id=pk)

distribution_data = self.edit_distribution_from_batch(org=org, data=data, dist_batch=dist_batch)

serializer = self.serializer_class(distribution_data.get('tag_distributions'), many=True)
return Response(serializer.data, status=status.HTTP_200_OK)

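For reference, a sketch of the request body that create() and update() above hand to the distribution service; the field names are taken from create_distribution_from_batch / edit_distribution_from_batch, the values are illustrative:

# {
#     "assigned_org": 12,
#     "parent": null,
#     "dists": [
#         {"species_code": 4, "count": 50, "batch_identity": "5005504xxxx"},
#         {"species_code": 1, "count": 20}
#     ]
# }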
@action(
methods=['post'],
detail=True,
url_path='distribute_distribution',
url_name='distribute_distribution',
name='distribute_distribution',
)
def create_distribute_from_distribution(self, request, pk=None):
"""
distribute from a tag distribution
"""

data = request.data.copy()
org = get_organization_by_user(request.user)
dist_batch = tag_models.TagDistributionBatch.objects.get(id=pk)

distribution_data = self.create_distribution_from_distribution(
org=org,
tag_batch=dist_batch,
data=data
)

serializer = self.serializer_class(distribution_data.get('tag_distributions'), many=True)
return Response(serializer.data, status=status.HTTP_200_OK)

@action(
methods=['put'],
detail=True,
url_path='edit_distribute_distribution',
url_name='edit_distribute_distribution',
name='edit_distribute_distribution',
)
def update_distribute_from_distribution(self, request, pk=None):
"""
update created distribution from distribution
"""

data = request.data.copy()
org = get_organization_by_user(request.user)
dist_batch = tag_models.TagDistributionBatch.objects.get(id=pk)

distribution_data = self.edit_distribution_from_distribution(
org=org,
tag_batch=dist_batch,
data=data
)

serializer = self.serializer_class(distribution_data.get('tag_distributions'), many=True)
return Response(serializer.data, status=status.HTTP_200_OK)

@action(
methods=['get'],
detail=True,
url_name='dist_by_batch',
url_path='dist_by_batch',
name='dist_by_batch'
)
def get_dist_by_batch(self, request, pk=None):
"""
get distributions by batch
"""
batch = tag_models.TagDistributionBatch.objects.get(id=pk)
distributions = batch.distributions.all()

page = self.paginate_queryset(distributions)
if page is not None:  # noqa
serializer = self.get_serializer(page, many=True)
return self.get_paginated_response(serializer.data)
return Response(self.serializer_class(distributions, many=True).data)

@action(
methods=['post'],
detail=True,
url_name='close_distribution',
url_path='close_distribution',
name='close_distribution',
)
def close_tag_distribution(self, request, pk=None):
distribution = self.get_object()
distribution.is_closed = True
distribution.save()

return Response(status=status.HTTP_200_OK)

@action(
methods=['get'],
detail=False,
url_path='close_distributions_list',
url_name='close_distributions_list',
name='close_distributions_list',
)
def close_distributions_list(self, request):
"""
list of closed distributions
"""

queryset = self.get_queryset(visibility_by_org_scope=True).filter(is_closed=True).order_by('-create_date')
queryset = self.filter_query(queryset)

page = self.paginate_queryset(queryset)
if page is not None:  # noqa
serializer = self.get_serializer(page, many=True)
return self.get_paginated_response(serializer.data)
return Response(self.serializer_class(queryset, many=True).data)


class TagDistributionBatchViewSet(
BaseViewSet,
viewsets.ModelViewSet,
SoftDeleteMixin,
DynamicSearchMixin,
TagDistributionService
):
queryset = tag_models.TagDistributionBatch.objects.all()
serializer_class = TagDistributionBatchSerializer
filter_backends = [SearchFilter]
search_fields = [
'dist_batch_identity',
'assigner_org__name',
'assigned_org__name',
'total_tag_count',
'is_closed',
]

def list(self, request, *args, **kwargs):
"""
list of tag distribution batches
"""
org = get_organization_by_user(request.user)

queryset = self.get_queryset(
visibility_by_org_scope=True
).filter(
is_closed=False,
top_root_distribution=True,
).order_by('-create_date')

if not queryset:
queryset = self.get_queryset(
visibility_by_org_scope=True
).filter(
is_closed=False,
owner_org=org,
top_root_distribution=False,
).order_by('-create_date')

queryset = self.filter_query(self.filter_queryset(queryset))

page = self.paginate_queryset(queryset)
if page is not None:  # noqa
serializer = self.get_serializer(page, many=True)
return self.get_paginated_response(serializer.data)
return Response(self.serializer_class(queryset, many=True).data)

def retrieve(self, request, pk=None, *args, **kwargs):
"""
detail of distribution batch
"""

distribution_batch = self.get_object()
serializer = self.serializer_class(distribution_batch)
return Response(serializer.data, status=status.HTTP_200_OK)

@action(
methods=['get'],
detail=True,
url_path='child_list',
url_name='child_list',
name='child_list'
)
def child_list(self, request, pk=None):
"""
list of all children of a tag distribution batch
"""
dist_batch = self.get_object()
queryset = dist_batch.children.all()

page = self.paginate_queryset(queryset)
if page is not None:  # noqa
serializer = self.get_serializer(page, many=True)
return self.get_paginated_response(serializer.data)
return Response(self.serializer_class(queryset, many=True).data)

@action(
methods=['post'],
detail=True,
url_name='close_dist_batch',
url_path='close_dist_batch',
name='close_dist_batch',
)
def close_tag_dist_batch(self, request, pk=None):
dist_batch = self.get_object()

# close distribution batch
dist_batch.is_closed = True
dist_batch.save()
dist_batch.distributions.all().update(is_closed=True)  # close distributions of batch
for distribute in dist_batch.distributions.all():
distribute.tag.all().update(status='F')

return Response(status=status.HTTP_200_OK)

@action(
methods=['get'],
detail=False,
url_path='closed_tag_dist_batch_list',
url_name='closed_tag_dist_batch_list',
name='closed_tag_dist_batch_list',
)
def close_tag_dist_batch_list(self, request):
"""
list of closed tag distribution batches
"""

queryset = self.get_queryset(visibility_by_org_scope=True).filter(is_closed=True).order_by('create_date')
queryset = self.filter_query(self.filter_queryset(queryset))

page = self.paginate_queryset(queryset)
if page is not None:  # noqa
serializer = self.get_serializer(page, many=True)
return self.get_paginated_response(serializer.data)
return Response(self.serializer_class(queryset, many=True).data)

@action(
methods=['post'],
detail=True,
url_name='reactivate_tag_dist_batch',
url_path='reactivate_tag_dist_batch',
name='reactivate_tag_dist_batch',
)
def reactivate_tag_dist_batch(self, request, pk=None):
"""
reactivate a closed distribution batch
"""

dist_batch = self.get_object()
dist_batch.is_closed = False
dist_batch.save(update_fields=['is_closed'])

return Response(status=status.HTTP_200_OK)

@action(
methods=['get'],
detail=False,
url_path='main_dashboard',
url_name='main_dashboard',
name='main_dashboard'
)
def main_dashboard(self, request):
"""
dashboard of main page
"""
org = get_organization_by_user(request.user)
params = self.request.query_params  # noqa
dashboard_data = self.distribution_batch_main_dashboard(org=org, is_closed=params.get('is_closed'))

return Response(dashboard_data, status=status.HTTP_200_OK)

# @action(
# methods=['get'],
# detail=True,
# url_path='distribution_pdf_view',
# url_name='distribution_pdf_view',
# name='distribution_pdf_view',
# )
# def distribution_pdf_view(self, request, pk=None):
# batch = tag_models.TagDistributionBatch.objects.select_related(
# 'assigner_org', 'assigned_org'
# ).prefetch_related('distributions').get(id=pk)
#
# html_string = render_to_string(
# 'pdf/tag_distribution.html', # noqa
# {'batch': batch}
# )
#
# html = HTML(
# string=html_string,
# base_url=request.build_absolute_uri('/')
# )
#
# pdf = html.write_pdf()
#
# response = HttpResponse(pdf, content_type='application/pdf')
# response['Content-Disposition'] = (
# f'inline; filename="distribution_{batch.dist_batch_identity}.pdf"'
# )
#
# return response

@action(
methods=['post', ],
detail=True,
url_name='assign_document',
url_path='assign_document',
name='assign_document'
)
@transaction.atomic
def assign_document(self, request, pk=None):
""" set warehouse exit document for a distribution batch """

# get distribution batch object & set document url
dist_batch = self.queryset.get(id=pk)

# upload document file to object storage
document = request.FILES.get('dist_exit_document')
document_url = upload_to_storage(
document,
f'{random.randint(1000, 9999)}_distribution_batch_document.{str(document).split(".")[-1]}'
)
dist_batch.warehouse_exit_doc = document_url
dist_batch.save(update_fields=['warehouse_exit_doc'])
serializer = self.serializer_class(dist_batch)
return Response(serializer.data, status=status.HTTP_200_OK)

@action(
methods=['post'],
detail=True,
url_path='accept_exit_doc',
url_name='accept_exit_doc',
name='accept_exit_doc',
)
def accept_exit_doc(self, request, pk=None):
"""
accept exit document from warehouse on distribution batch
"""

dist_batch = self.get_object()
dist_batch.exit_doc_status = True
dist_batch.save(update_fields=['exit_doc_status'])

return Response(status=status.HTTP_200_OK)

def destroy(self, request, pk=None, *args, **kwargs):
"""
delete a tag distribution batch and free its tags from distribution
"""

dist_batch = self.get_object()

for distribute in dist_batch.distributions.all():
distribute.tag.all().update(status='F')
distribute.tag.clear()
distribute.soft_delete()

dist_batch.soft_delete()

return Response(status=status.HTTP_200_OK)

@@ -136,6 +136,11 @@ class TagBatchSerializer(serializers.ModelSerializer):
def to_representation(self, instance):
representation = super().to_representation(instance)

representation['organization'] = {
'id': instance.organization.id,
'name': instance.organization.name
}

representation['tag'] = [{
'tag_code': tag.tag_code,
'species_code': tag.species_code,
@@ -143,3 +148,67 @@ class TagBatchSerializer(serializers.ModelSerializer):
} for tag in instance.tag.all()]

return representation


class TagDistributionSerializer(serializers.ModelSerializer):
class Meta:
model = tag_models.TagDistribution
fields = '__all__'

def to_representation(self, instance):
"""
customize output of serializer
"""
representation = super().to_representation(instance)

if instance.batch:
representation['batch'] = {
'id': instance.batch.id,
'batch_creator': instance.batch.organization.name,
'batch_identity': instance.batch.batch_identity
}

representation['assigner_org'] = {
'id': instance.assigner_org.id,
'name': instance.assigner_org.name
}

representation['assigned_org'] = {
'id': instance.assigned_org.id,
'name': instance.assigned_org.name
}

return representation


class TagDistributionBatchSerializer(serializers.ModelSerializer):
class Meta:
model = tag_models.TagDistributionBatch
fields = '__all__'

def to_representation(self, instance):
representation = super().to_representation(instance)

representation['assigner_org'] = {
'id': instance.assigner_org.id,
'name': instance.assigner_org.name,
}

representation['assigned_org'] = {
'id': instance.assigned_org.id,
'name': instance.assigned_org.name
}

representation['distributions'] = [{
'id': dist.id,
'dist_identity': dist.dist_identity,
'batch_identity': dist.batch.batch_identity if dist.batch else None,
'species_code': dist.species_code,
'distributed_number': dist.distributed_number,
'total_tag_count': dist.total_tag_count,
'remaining_number': dist.remaining_number,
'serial_from': dist.batch.serial_from if dist.batch else None,
'serial_to': dist.batch.serial_to if dist.batch else None,
} for dist in instance.distributions.all()]

return representation

@@ -4,7 +4,7 @@ from rest_framework.routers import DefaultRouter
from .api import (
TagViewSet,
TagAssignmentViewSet,
AllocatedTagsViewSet, TagBatchViewSet
AllocatedTagsViewSet, TagBatchViewSet, TagDistributionViewSet, TagDistributionBatchViewSet
)

router = DefaultRouter()
@@ -12,6 +12,8 @@ router.register(r'tag', TagViewSet, basename='tag')
router.register(r'tag_assignment', TagAssignmentViewSet, basename='tag_assignment')
router.register(r'allocated_tag', AllocatedTagsViewSet, basename='allocated_tag')
router.register(r'tag_batch', TagBatchViewSet, basename='tag_batch')
router.register(r'tag_distribution', TagDistributionViewSet, basename='tag_distribution')
router.register(r'tag_distribution_batch', TagDistributionBatchViewSet, basename='tag_distribution_batch')

urlpatterns = [
path('v1/', include(router.urls))
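For orientation only: with DefaultRouter, the two new registrations expose the standard CRUD routes plus the @action paths declared in the viewsets, roughly:

# /v1/tag_distribution/                                  list, create
# /v1/tag_distribution/{id}/                             retrieve, update
# /v1/tag_distribution/{id}/distribute_distribution/     POST
# /v1/tag_distribution_batch/{id}/close_dist_batch/      POST
# /v1/tag_distribution_batch/{id}/assign_document/       POST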
@@ -12,7 +12,14 @@ from apps.warehouse.models import InventoryQuotaSaleTransaction, InventoryQuotaS
class TransactionDashboardService:

@staticmethod
def get_dashboard(org: Organization, start_date: str = None, end_date: str = None, status: str = None):
def get_dashboard(
org: Organization,
free_visibility_tr_objects=None,
free_visibility_tr_item_objects=None,
start_date: str = None,
end_date: str = None,
status: str = None
):

orgs_child = get_all_org_child(org=org)
orgs_child.append(org)
@@ -23,13 +30,18 @@ class TransactionDashboardService:
items = InventoryQuotaSaleItem.objects.all().select_related("gov_product", "free_product")

else:
transactions = InventoryQuotaSaleTransaction.objects.filter(
seller_organization__in=orgs_child
)

items = InventoryQuotaSaleItem.objects.filter(
transaction__seller_organization__in=orgs_child
).select_related("gov_product", "free_product")
if free_visibility_tr_objects:
transactions = free_visibility_tr_objects
items = InventoryQuotaSaleItem.objects.filter(
transaction__in=transactions
).select_related("gov_product", "free_product")
else:
transactions = InventoryQuotaSaleTransaction.objects.filter(
seller_organization__in=orgs_child
)
items = InventoryQuotaSaleItem.objects.filter(
transaction__seller_organization__in=orgs_child
).select_related("gov_product", "free_product")

# filter queryset (transactions & items) by date
if start_date and end_date:

@@ -280,13 +280,31 @@ class InventoryQuotaSaleTransactionViewSet(
transaction_status = query_param.get('status') if 'status' in query_param.keys() else None

org = get_organization_by_user(request.user)
# filter by date & transaction status
transaction_dashboard_data = self.get_dashboard(
org,
start_date=start_date,
end_date=end_date,
status=transaction_status
)
if org.free_visibility_by_scope:

tr_objects = self.get_queryset(visibility_by_org_scope=True)

tr_item_view = InventoryQuotaSaleItemViewSet()
tr_item_view.request = request
tr_item_view.kwargs = {'pk': None}
tr_item_objects = tr_item_view.get_queryset(visibility_by_org_scope=True)

transaction_dashboard_data = self.get_dashboard(
org,
free_visibility_tr_objects=tr_objects,
free_visibility_tr_item_objects=tr_item_objects,
start_date=start_date,
end_date=end_date,
status=transaction_status,
)
else:
# filter by date & transaction status
transaction_dashboard_data = self.get_dashboard(
org,
start_date=start_date,
end_date=end_date,
status=transaction_status,
)

return Response(transaction_dashboard_data, status=status.HTTP_200_OK)


@@ -1,4 +1,5 @@
import base64
import random
from datetime import datetime
from functools import lru_cache

@@ -56,3 +57,10 @@ def parse_birthdate(jalali_str):
gregorian_dt,
timezone.get_current_timezone()
)


def generate_unique_code(prefix: str):
now = timezone.now()
date_part = now.strftime("%Y%m%d")
rand_part = random.randint(100000, 999999)
return f"{prefix}{date_part}{rand_part}"

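For reference, a sample of the identifiers this helper produces; callers pass a 4-digit random prefix, the date and random parts here are illustrative:

# generate_unique_code("4821")
# -> "482120250614123456"   (4-digit prefix + YYYYMMDD + 6 random digits, 18 characters total)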
@@ -1,4 +1,31 @@
import boto3
from botocore.exceptions import NoCredentialsError

STORAGE_ENDPOINT = 'https://s3.rasadyar.com/rasaddam'
STORAGE_BUCKET_NAME = 'ticket-rasadyar'
STORAGE_ACCESS_KEY = "zG3ewsbYsTqCmuws"
STORAGE_SECRET_KEY = 'RInUMB78zlQZp6CNf8+sRoSh2cNDHcGQhXrLnTJ1AuI='


def upload_to_storage(file_obj, file_name):
try:
s3 = boto3.client(
's3',
endpoint_url=STORAGE_ENDPOINT,
aws_access_key_id=STORAGE_ACCESS_KEY,
aws_secret_access_key=STORAGE_SECRET_KEY
)

s3.upload_fileobj(
file_obj,
STORAGE_BUCKET_NAME,
file_name,
ExtraArgs={'ACL': 'public-read'}  # public read access
)

return f"{STORAGE_ENDPOINT}/{STORAGE_BUCKET_NAME}/{file_name}"

except NoCredentialsError:
raise Exception("اعتبارنامههای AWS معتبر نیستند")
except Exception as e:
raise Exception(f"خطا در آپلود فایل: {e}")

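A minimal sketch of loading these storage credentials from the environment instead of hardcoding them, assuming a local .env file and hypothetical variable names (python-dotenv is added to requirements below):

import os
from dotenv import load_dotenv

load_dotenv()  # reads a local .env file into the process environment
STORAGE_ACCESS_KEY = os.environ["STORAGE_ACCESS_KEY"]   # hypothetical variable name
STORAGE_SECRET_KEY = os.environ["STORAGE_SECRET_KEY"]   # hypothetical variable name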
@@ -84,3 +84,4 @@ channels_redis
daphne
django-jazzmin
python-dotenv
weasyprint