Compare commits
33 Commits: 0b08107c14...developmen
| SHA1 |
|---|
| 858af1231b |
| 2c87642f56 |
| 1e773ef53d |
| 67fa1e23e7 |
| c02c165ff9 |
| f13851a30d |
| aa8e39c25b |
| 66554159ea |
| 13caa78087 |
| 6c291e2294 |
| 7618518dc7 |
| 8bea18e676 |
| 3b6deb0356 |
| 8449e2ef1b |
| 627168b82a |
| 8296214401 |
| 84ea3b35d2 |
| dbd0715c54 |
| 241a3551b7 |
| e00bd72a58 |
| 77f35fe350 |
| bec3405454 |
| ce9e45667f |
| 3209dd2d31 |
| 0a84ca6fe4 |
| cc81bc8a3d |
| 3258991014 |
| fb58e6c3aa |
| c5b87e8591 |
| 0dd145131f |
| 74a870380e |
| f798b72dbc |
| 93180edc0b |
@@ -17,6 +17,15 @@ RUN pip config --user set global.index https://mirror-pypi.runflare.com/simple
 RUN pip config --user set global.index-url https://mirror-pypi.runflare.com/simple
 RUN pip config --user set global.trusted-host mirror-pypi.runflare.com
 RUN pip install --upgrade pip
+#RUN apt-get update && apt-get install -y \
+# libcairo2 \
+# libpango-1.0-0 \
+# libpangocairo-1.0-0 \
+# libgdk-pixbuf2.0-0 \
+# libffi-dev \
+# shared-mime-info \
+# fonts-dejavu \
+# && rm -rf /var/lib/apt/lists/*
 COPY ./requirements.txt .
 RUN pip install --no-cache-dir -r requirements.txt
 
@@ -30,7 +30,7 @@ from apps.authentication.models import (
     Organization,
     OrganizationType,
     BankAccountInformation,
-    BlacklistedAccessToken
+    BlacklistedAccessToken, OrganizationLocationInfo
 )
 from apps.authentication.tools import get_token_jti
 from apps.authorization.api.v1 import api as authorize_view
@@ -261,14 +261,21 @@ class OrganizationViewSet(BaseViewSet, ModelViewSet, DynamicSearchMixin):
     def list(self, request, *args, **kwargs):
         """ all organization """
         org = get_organization_by_user(request.user)
+        param = self.request.query_params # noqa
 
         queryset = self.get_queryset(
             visibility_by_org_scope=True
         ) if org.free_visibility_by_scope else self.get_queryset()
 
-        query = self.filter_query(queryset)
+        # filter by organization type
+        if 'org_type' in param.keys():
+            queryset = queryset.filter(type__id=int(param.get('org_type', 0)))
 
-        page = self.paginate_queryset(query.order_by('-create_date')) # paginate queryset
+        # filter on search
+        if 'search' in param.keys():
+            queryset = self.filter_query(queryset)
+
+        page = self.paginate_queryset(queryset.order_by('-create_date')) # paginate queryset
 
         if page is not None: # noqa
             serializer = self.serializer_class(page, many=True)
@@ -284,6 +291,18 @@ class OrganizationViewSet(BaseViewSet, ModelViewSet, DynamicSearchMixin):
         if serializer.is_valid():
             organization = serializer.save()
 
+            if 'addresses' in request.data.keys():
+                # import multiple addresses with postal_code to orgs
+
+                address_obj_list = []
+                for addr in request.data['addresses']:
+                    addr.update({'org': organization})
+                    address_obj_list.append(
+                        OrganizationLocationInfo(**addr)
+                    )
+
+                OrganizationLocationInfo.objects.bulk_create(address_obj_list)
+
             if 'user_relations' in request.data.keys():
                 user_relations = CustomOperations().custom_create( # create user relations
                     request=request,
@@ -315,6 +334,22 @@ class OrganizationViewSet(BaseViewSet, ModelViewSet, DynamicSearchMixin):
         serializer.is_valid(raise_exception=True)
         organization = serializer.save()
 
+        if 'addresses' in request.data.keys():
+            # import multiple addresses with postal_code to orgs
+
+            locations = organization.locations.all()
+            locations.delete() # remove ex locations
+
+            # create new locations
+            address_obj_list = []
+            for addr in request.data['addresses']:
+                addr.update({'org': organization})
+                address_obj_list.append(
+                    OrganizationLocationInfo(**addr)
+                )
+
+            OrganizationLocationInfo.objects.bulk_create(address_obj_list)
+
         if 'user_relations' in request.data.keys():
             user_relations = CustomOperations().custom_update( # update user relations
                 request=request,
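Note: a sketch of the request payload that the create() and update() hunks above appear to expect. The `addresses` key and the `postal_code`/`address` fields follow directly from `OrganizationLocationInfo(**addr)`; everything else in the organization body is elided here, and the sample values are made up.

```python
payload = {
    # ... the usual organization fields go here ...
    "addresses": [
        {"postal_code": "1234567890", "address": "No. 1, Example St."},
        {"postal_code": "0987654321", "address": "No. 2, Example St."},
    ],
}
# After serializer.save(), each dict gets 'org' injected and is instantiated as
# OrganizationLocationInfo(**addr), then persisted with bulk_create(), so the
# keys must match OrganizationLocationInfo model fields.
```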
@@ -258,6 +258,7 @@ class OrganizationSerializer(serializers.ModelSerializer):
             'address',
             'parent_organization',
             'national_unique_id',
+            'unique_unit_identity',
             'company_code',
             'field_of_activity',
             'free_visibility_by_scope',
@@ -363,6 +364,13 @@ class OrganizationSerializer(serializers.ModelSerializer):
                 'name': city.name,
             } for city in instance.service_area.all()
         ]
+
+        representation['addresses'] = [
+            {
+                "postal_code": addr.postal_code,
+                "address": addr.address
+            } for addr in instance.locations.all()
+        ]
         return representation
 
     def update(self, instance, validated_data):
@@ -374,6 +382,7 @@ class OrganizationSerializer(serializers.ModelSerializer):
         instance.address = validated_data.get('address', instance.address)
         instance.parent_organization = validated_data.get('parent_organization', instance.parent_organization)
         instance.national_unique_id = validated_data.get('national_unique_id', instance.national_unique_id)
+        instance.unique_unit_identity = validated_data.get('unique_unit_identity', instance.unique_unit_identity)
         instance.purchase_policy = validated_data.get('purchase_policy', instance.purchase_policy)
         instance.free_visibility_by_scope = validated_data.get(
             'free_visibility_by_scope',
@@ -0,0 +1,16 @@
+# Generated by Django 5.0 on 2026-02-09 06:41
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ('authentication', '0060_organization_ownership_code'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='organization',
+            name='unique_unit_identity',
+            field=models.CharField(default='0', max_length=150),
+        ),
+    ]
@@ -0,0 +1,34 @@
+# Generated by Django 5.0 on 2026-02-09 06:57
+
+import django.db.models.deletion
+from django.conf import settings
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('authentication', '0061_organization_unique_unit_identity_and_more'),
+    ]
+
+    operations = [
+        migrations.CreateModel(
+            name='OrganizationLocationInfo',
+            fields=[
+                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('create_date', models.DateTimeField(auto_now_add=True)),
+                ('modify_date', models.DateTimeField(auto_now=True)),
+                ('creator_info', models.CharField(max_length=100, null=True)),
+                ('modifier_info', models.CharField(max_length=100, null=True)),
+                ('trash', models.BooleanField(default=False)),
+                ('postal_code', models.CharField(blank=True, max_length=150, null=True)),
+                ('address', models.TextField(blank=True, max_length=2000, null=True)),
+                ('created_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='%(class)s_createddby', to=settings.AUTH_USER_MODEL)),
+                ('modified_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='%(class)s_modifiedby', to=settings.AUTH_USER_MODEL)),
+                ('org', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='locations', to='authentication.organization')),
+            ],
+            options={
+                'abstract': False,
+            },
+        ),
+    ]
@@ -116,6 +116,7 @@ class Organization(BaseModel):
         null=True
     )
     national_unique_id = models.CharField(max_length=30, default="0")
+    unique_unit_identity = models.CharField(max_length=150, default="0")
    activity_fields = (
         ('CO', 'Country'),
         ('PR', 'Province'),
@@ -167,6 +168,23 @@
         super(Organization, self).save(*args, **kwargs)
 
 
+class OrganizationLocationInfo(BaseModel):
+    org = models.ForeignKey(
+        Organization,
+        on_delete=models.CASCADE,
+        related_name='locations',
+        null=True
+    )
+    postal_code = models.CharField(max_length=150, null=True, blank=True)
+    address = models.TextField(max_length=2000, null=True, blank=True)
+
+    def __str__(self):
+        return f'{self.org.name}-{self.postal_code}-{self.address}'
+
+    def save(self, *args, **kwargs):
+        super(OrganizationLocationInfo, self).save(*args, **kwargs)
+
+
 class OrganizationStats(BaseModel):
     organization = models.OneToOneField(
         Organization,
@@ -42,7 +42,6 @@ class HerdRancherSyncService:
         seen_in_batch = set()
-
         for temp in queryset.iterator(chunk_size=batch_size):
 
             rancher = rancher_map.get(temp.rancher_national_code)
 
             if not rancher:
@@ -52,7 +51,7 @@
                 national_code=temp.rancher_national_code,
                 rancher_type='N',
                 city_id=city_map.get(temp.city.strip()),
-                province_id=30
+                province_id=28
             )
             new_ranchers.append(rancher)
             rancher_map[temp.rancher_national_code] = rancher
@@ -78,7 +77,7 @@
                     postal=temp.postal_code,
                     unit_unique_id=temp.unit_unique_id,
                     city_id=city_map.get(temp.city.strip()),
-                    province_id=30
+                    province_id=28
                 )
             }
         )
@@ -1,8 +1,9 @@
 import pandas as pd
 from django.core.management.base import BaseCommand
 from django.utils.dateparse import parse_datetime
-from apps.livestock.models import LiveStock, LiveStockType, LiveStockSpecies
+
 from apps.herd.models import Herd
+from apps.livestock.models import LiveStock, LiveStockType, LiveStockSpecies
 from apps.tag.models import Tag
 
 
@@ -16,6 +17,7 @@ class Command(BaseCommand):
         path = options['excel_path']
         df = pd.read_excel(path)
         records = df.to_dict(orient='records')
+        print(records[1])
 
         self.stdout.write(self.style.SUCCESS(f"{len(records)} records loaded."))
 
@@ -46,7 +48,7 @@
                 herd_cache[herd_code] = herd
 
             tag_code = r.get('national_id_livestock_code')
-            tag = Tag.objects.filter(code=tag_code).first()
+            tag = Tag.objects.filter(tag_code=tag_code).first()
             if not tag:
                 skipped += 1
                 continue
@@ -0,0 +1,144 @@
|
|||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
import jdatetime
|
||||||
|
from django.core.management.base import BaseCommand
|
||||||
|
from django.db import transaction
|
||||||
|
from django.utils import timezone
|
||||||
|
|
||||||
|
from apps.herd.models import Herd
|
||||||
|
from apps.livestock.models import (
|
||||||
|
LiveStock,
|
||||||
|
LiveStockSpecies,
|
||||||
|
ExcelLiveStocks
|
||||||
|
)
|
||||||
|
|
||||||
|
BATCH_SIZE = 100
|
||||||
|
|
||||||
|
|
||||||
|
class Command(BaseCommand):
|
||||||
|
help = "Import livestock from ExcelLiveStocks into LiveStock using bulk_create"
|
||||||
|
|
||||||
|
def normalize_herd_code(self, value, length=10):
|
||||||
|
if value is None:
|
||||||
|
return None
|
||||||
|
return str(value).strip().zfill(length)
|
||||||
|
|
||||||
|
def parse_jalali_datetime(self, date_str: str):
|
||||||
|
if not date_str:
|
||||||
|
return None
|
||||||
|
|
||||||
|
year, month, day = map(int, date_str.split('/'))
|
||||||
|
|
||||||
|
# jalali → gregorian (date)
|
||||||
|
g_date = jdatetime.date(year, month, day).togregorian()
|
||||||
|
|
||||||
|
# date → naive datetime
|
||||||
|
naive_dt = datetime.combine(g_date, datetime.min.time())
|
||||||
|
|
||||||
|
# naive → aware (VERY IMPORTANT)
|
||||||
|
return timezone.make_aware(naive_dt)
|
||||||
|
|
||||||
|
def handle(self, *args, **options):
|
||||||
|
qs = ExcelLiveStocks.objects.all()
|
||||||
|
|
||||||
|
if not qs.exists():
|
||||||
|
self.stdout.write(self.style.WARNING("No records to import"))
|
||||||
|
return
|
||||||
|
|
||||||
|
# ---------- preload lookups ----------
|
||||||
|
herd_map = {
|
||||||
|
h.code: h
|
||||||
|
for h in Herd.objects.all()
|
||||||
|
}
|
||||||
|
|
||||||
|
species_map = {
|
||||||
|
s.name.strip(): s
|
||||||
|
for s in LiveStockSpecies.objects.all()
|
||||||
|
}
|
||||||
|
|
||||||
|
livestocks_to_create = []
|
||||||
|
processed_ids = []
|
||||||
|
|
||||||
|
created_count = 0
|
||||||
|
skipped = 0
|
||||||
|
|
||||||
|
self.stdout.write("Starting import...")
|
||||||
|
|
||||||
|
with transaction.atomic():
|
||||||
|
for row in qs.iterator(chunk_size=BATCH_SIZE):
|
||||||
|
herd = herd_map.get(self.normalize_herd_code(row.herd_code))
|
||||||
|
# print(self.normalize_herd_code(row.herd_code))
|
||||||
|
if not herd:
|
||||||
|
# print("herd")
|
||||||
|
skipped += 1
|
||||||
|
continue
|
||||||
|
|
||||||
|
# species cache / create
|
||||||
|
species_name = (row.species or "").strip()
|
||||||
|
if not species_name:
|
||||||
|
# print("species")
|
||||||
|
skipped += 1
|
||||||
|
continue
|
||||||
|
|
||||||
|
species = species_map.get(species_name)
|
||||||
|
if not species:
|
||||||
|
species = LiveStockSpecies.objects.create(
|
||||||
|
name=species_name
|
||||||
|
)
|
||||||
|
species_map[species_name] = species
|
||||||
|
|
||||||
|
livestocks_to_create.append(
|
||||||
|
LiveStock(
|
||||||
|
herd=herd,
|
||||||
|
species=species,
|
||||||
|
gender=self.map_gender(row.gender),
|
||||||
|
birthdate=self.parse_jalali_datetime(row.birthdate),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
processed_ids.append(row.id)
|
||||||
|
|
||||||
|
if len(livestocks_to_create) >= BATCH_SIZE:
|
||||||
|
print("-----------------------------CREATE------------------------------------")
|
||||||
|
print(livestocks_to_create)
|
||||||
|
LiveStock.objects.bulk_create(
|
||||||
|
livestocks_to_create,
|
||||||
|
batch_size=BATCH_SIZE
|
||||||
|
)
|
||||||
|
created_count += len(livestocks_to_create)
|
||||||
|
livestocks_to_create.clear()
|
||||||
|
break
|
||||||
|
|
||||||
|
# flush remaining
|
||||||
|
if livestocks_to_create:
|
||||||
|
LiveStock.objects.bulk_create(
|
||||||
|
livestocks_to_create,
|
||||||
|
batch_size=BATCH_SIZE
|
||||||
|
)
|
||||||
|
created_count += len(livestocks_to_create)
|
||||||
|
|
||||||
|
# mark excel rows as archived
|
||||||
|
# ExcelLiveStocks.objects.filter(
|
||||||
|
# id__in=processed_ids
|
||||||
|
# ).update(archive=True)
|
||||||
|
|
||||||
|
self.stdout.write(self.style.SUCCESS(
|
||||||
|
f"Import finished. Created: {created_count}, Skipped: {skipped}"
|
||||||
|
))
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def map_gender(value):
|
||||||
|
if not value:
|
||||||
|
return 1
|
||||||
|
value = value.strip().lower()
|
||||||
|
if value in ['female', 'f', 'ماده']:
|
||||||
|
return 2
|
||||||
|
return 1
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def parse_date(value):
|
||||||
|
if not value:
|
||||||
|
return None
|
||||||
|
try:
|
||||||
|
return datetime.strptime(value, '%Y/%m/%d')
|
||||||
|
except Exception:
|
||||||
|
return None
|
||||||
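Note: a minimal standalone sketch of the Jalali-to-aware-datetime conversion that `parse_jalali_datetime` performs above, assuming the `jdatetime` package; outside Django, `zoneinfo` stands in for `django.utils.timezone.make_aware`.

```python
from datetime import datetime
from zoneinfo import ZoneInfo

import jdatetime


def jalali_to_aware(date_str: str, tz: str = "Asia/Tehran"):
    """Convert a 'YYYY/MM/DD' Jalali date string to a timezone-aware datetime."""
    if not date_str:
        return None
    year, month, day = map(int, date_str.split('/'))
    g_date = jdatetime.date(year, month, day).togregorian()   # Jalali -> Gregorian date
    naive_dt = datetime.combine(g_date, datetime.min.time())  # date -> naive midnight
    return naive_dt.replace(tzinfo=ZoneInfo(tz))              # naive -> aware


print(jalali_to_aware("1402/11/20"))  # 2024-02-09 00:00:00+03:30
```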
apps/livestock/migrations/0019_excellivestocks.py (new file, +38)
@@ -0,0 +1,38 @@
+# Generated by Django 5.0 on 2026-02-09 10:58
+
+import django.db.models.deletion
+from django.conf import settings
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('livestock', '0018_livestockspecies_en_name_alter_livestockspecies_name'),
+        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
+    ]
+
+    operations = [
+        migrations.CreateModel(
+            name='ExcelLiveStocks',
+            fields=[
+                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('create_date', models.DateTimeField(auto_now_add=True)),
+                ('modify_date', models.DateTimeField(auto_now=True)),
+                ('creator_info', models.CharField(max_length=100, null=True)),
+                ('modifier_info', models.CharField(max_length=100, null=True)),
+                ('trash', models.BooleanField(default=False)),
+                ('national_id', models.CharField(max_length=250, null=True)),
+                ('herd_code', models.CharField(max_length=150, null=True)),
+                ('species', models.CharField(max_length=250, null=True)),
+                ('birthdate', models.CharField(max_length=150, null=True)),
+                ('gender', models.CharField(max_length=150, null=True)),
+                ('agent_code', models.CharField(max_length=150, null=True)),
+                ('created_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='%(class)s_createddby', to=settings.AUTH_USER_MODEL)),
+                ('modified_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='%(class)s_modifiedby', to=settings.AUTH_USER_MODEL)),
+            ],
+            options={
+                'abstract': False,
+            },
+        ),
+    ]
@@ -0,0 +1,18 @@
+# Generated by Django 5.0 on 2026-02-10 08:02
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('livestock', '0019_excellivestocks'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='excellivestocks',
+            name='sync_status',
+            field=models.CharField(max_length=50, null=True),
+        ),
+    ]
@@ -103,6 +103,16 @@ class LiveStock(BaseModel):
         return super(LiveStock, self).save(*args, **kwargs)
 
 
+class ExcelLiveStocks(BaseModel):
+    national_id = models.CharField(max_length=250, null=True)
+    herd_code = models.CharField(max_length=150, null=True)
+    species = models.CharField(max_length=250, null=True)
+    birthdate = models.CharField(max_length=150, null=True)
+    gender = models.CharField(max_length=150, null=True)
+    agent_code = models.CharField(max_length=150, null=True)
+    sync_status = models.CharField(max_length=50, null=True)
+
+
 class TemporaryLiveStock(BaseModel):
     rancher = models.ForeignKey(
         herd_models.Rancher,
@@ -4,12 +4,12 @@ from django.core.management.base import BaseCommand
|
|||||||
from django.db import transaction
|
from django.db import transaction
|
||||||
|
|
||||||
from apps.herd.models import Herd
|
from apps.herd.models import Herd
|
||||||
from apps.livestock.models import LiveStock, LiveStockType
|
from apps.livestock.models import LiveStock, LiveStockType, ExcelLiveStocks
|
||||||
from apps.tag.models import Tag, TemporaryTags
|
from apps.tag.models import Tag
|
||||||
from common.generics import parse_birthdate
|
from common.generics import parse_birthdate
|
||||||
|
|
||||||
BATCH_SIZE = 5000
|
BATCH_SIZE = 1000
|
||||||
CHUNK_SIZE = 10000
|
CHUNK_SIZE = 1000
|
||||||
|
|
||||||
|
|
||||||
class Command(BaseCommand):
|
class Command(BaseCommand):
|
||||||
@@ -22,16 +22,16 @@ class Command(BaseCommand):
         )
 
         qs = (
-            TemporaryTags.objects
+            ExcelLiveStocks.objects
             .filter(sync_status__isnull=True)
-            .only('herd_code', 'birthdate', 'gender', 'tag')
+            .only('herd_code', 'birthdate', 'gender', 'national_id')
         )
 
         total = qs.count()
         processed = 0
         start_time = time.time()
 
-        LOG_EVERY = 10000
+        LOG_EVERY = 1000
 
         buffer = []
         for temp in qs.iterator(chunk_size=CHUNK_SIZE):
@@ -64,7 +64,7 @@
         self.stdout.write(self.style.SUCCESS("DONE ✅"))
 
     def process_batch(self, temps):
-        herd_codes = {t.herd_code for t in temps if t.herd_code}
+        herd_codes = {self.normalize_herd_code(t.herd_code) for t in temps if t.herd_code}
 
         herds = {
             h.code: h
@@ -90,7 +90,7 @@
         existing_tags = {
             t.tag_code: t
             for t in Tag.objects.filter(
-                tag_code__in=[t.tag for t in temps if t.tag]
+                tag_code__in=[t.national_id for t in temps if t.national_id]
             )
         }
 
@@ -99,28 +99,28 @@
         new_tags = []
 
         for temp in temps:
-            herd = herds.get(temp.herd_code)
+            herd = herds.get(self.normalize_herd_code(temp.herd_code))
             if not herd:
                 continue
 
             birthdate = parse_birthdate(temp.birthdate)
             gender = 1 if temp.gender == 'M' else 2
-            livestock_type = livestock_types.get(temp.type)
-            weight_type = livestock_type.weight_type
+            livestock_type = livestock_types.get(temp.species)
+            weight_type = livestock_type.weight_type if livestock_type else 'L'
 
-            key = (temp.herd_code, birthdate, gender)
+            key = (self.normalize_herd_code(temp.herd_code), birthdate, gender)
             livestock = livestock_map.get(key)
 
             if not livestock:
-                if not temp.tag:
+                if not temp.national_id:
                     continue
 
-                tag = existing_tags.get(temp.tag)
+                tag = existing_tags.get(temp.national_id)
 
                 if not tag:
-                    tag = Tag(tag_code=temp.tag, status='A')
+                    tag = Tag(tag_code=temp.national_id, status='A')
                     new_tags.append(tag)
-                    existing_tags[temp.tag] = tag
+                    existing_tags[temp.national_id] = tag
 
                 livestock = LiveStock(
                     herd=herd,
@@ -136,13 +136,13 @@
                 temp.sync_status = 'S'
                 continue
 
-            if livestock.tag is None and temp.tag:
-                tag = existing_tags.get(temp.tag)
+            if livestock.tag is None and temp.national_id:
+                tag = existing_tags.get(temp.national_id)
 
                 if not tag:
-                    tag = Tag(tag_code=temp.tag, status='A')
+                    tag = Tag(tag_code=temp.national_id, status='A')
                     new_tags.append(tag)
-                    existing_tags[temp.tag] = tag
+                    existing_tags[temp.national_id] = tag
 
                 livestock.tag = tag
                 updated_livestock.append(livestock)
@@ -161,8 +161,13 @@
                 ['tag'],
                 batch_size=BATCH_SIZE
             )
-            TemporaryTags.objects.bulk_update(
+            ExcelLiveStocks.objects.bulk_update(
                 temps,
                 ['sync_status'],
                 batch_size=BATCH_SIZE
             )
+
+    def normalize_herd_code(self, value, length=10):
+        if value is None:
+            return None
+        return str(value).strip().zfill(length)
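Note: a quick standalone illustration of the zero-padding `normalize_herd_code` applies on both sides of the herd lookup above; spreadsheet exports commonly drop leading zeros from numeric codes, so padded strings are compared instead.

```python
def normalize_herd_code(value, length=10):
    """Pad a herd code to a fixed width so spreadsheet-mangled values still match."""
    if value is None:
        return None
    return str(value).strip().zfill(length)


print(normalize_herd_code(123456789))  # '0123456789'
print(normalize_herd_code(' 42 '))     # '0000000042'
```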
@@ -0,0 +1,25 @@
+# Generated by Django 5.0 on 2026-02-07 07:29
+
+import django.db.models.deletion
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('authentication', '0060_organization_ownership_code'),
+        ('tag', '0042_tagdistribution_parent_and_more'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='tagdistributionbatch',
+            name='owner_org',
+            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='tag_distribution_batch', to='authentication.organization'),
+        ),
+        migrations.AddField(
+            model_name='tagdistributionbatch',
+            name='top_root_distribution',
+            field=models.BooleanField(default=False),
+        ),
+    ]
@@ -0,0 +1,23 @@
+# Generated by Django 5.0 on 2026-02-08 07:27
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('tag', '0043_tagdistributionbatch_owner_org_and_more'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='tagdistributionbatch',
+            name='exit_doc_status',
+            field=models.BooleanField(default=False),
+        ),
+        migrations.AddField(
+            model_name='tagdistributionbatch',
+            name='warehouse_exit_doc',
+            field=models.CharField(max_length=350, null=True),
+        ),
+    ]
@@ -138,6 +138,12 @@ class TagDistributionBatch(BaseModel):
         null=True
     )
     dist_batch_identity = models.CharField(max_length=20, default="0", unique=True, null=True)
+    owner_org = models.ForeignKey(
+        Organization,
+        on_delete=models.CASCADE,
+        related_name='tag_distribution_batch',
+        null=True
+    )
     assigner_org = models.ForeignKey(
         Organization,
         on_delete=models.CASCADE,
@@ -163,6 +169,9 @@
     total_tag_count = models.IntegerField(default=0)
     total_distributed_tag_count = models.PositiveBigIntegerField(default=0)
     remaining_tag_count = models.PositiveBigIntegerField(default=0)
+    top_root_distribution = models.BooleanField(default=False)
+    warehouse_exit_doc = models.CharField(max_length=350, null=True)
+    exit_doc_status = models.BooleanField(default=False)
     is_closed = models.BooleanField(default=False)
 
     def __str__(self):
@@ -76,11 +76,13 @@ class TagDistributionService:
             # create distribution batch
             distributions_batch = TagDistributionBatch.objects.create(
                 parent=TagDistributionBatch.objects.get(id=data.get('parent')) if data.get('parent') else None,
+                owner_org=assigned_org,
                 assigner_org=org,
                 assigned_org=assigned_org,
                 total_tag_count=total_counted_tags,
                 dist_batch_identity=generate_unique_code(f"{random.randint(1000, 9999)}"),
                 distribution_type=distribution_type,
+                top_root_distribution=True
             )
             distributions_batch.distributions.add(*distributions)
 
@@ -186,7 +188,9 @@
             parent_tag_distribution = TagDistribution.objects.get(
                 id=dist_data['parent_tag_distribution']
             )
-            batch = TagBatch.objects.get(batch_identity=dist_data.get('batch_identity'))
+            batch = TagBatch.objects.get(
+                batch_identity=dist_data.get('batch_identity')
+            ) if dist_data.get('batch_identity') else None
 
             tags = Tag.objects.filter(
                 distributions__tag_distribution_batch=parent_batch,
@@ -218,10 +222,11 @@
 
             dist_batch = TagDistributionBatch.objects.create(
                 parent=parent_batch,
+                owner_org=assigned_org,
                 assigner_org=org,
                 assigned_org=assigned_org,
                 total_tag_count=total_counted_tags,
-                distribution_type='batch',
+                distribution_type=parent_batch.distribution_type,
                 dist_batch_identity=generate_unique_code(
                     f"{random.randint(1000, 9999)}"
                 )
@@ -243,7 +248,7 @@
         with transaction.atomic():
 
             if tag_batch.assigner_org != org:
-                raise PermissionDenied("اجازه ویرایش این توزیع را ندارید")
+                raise PermissionDenied("اجازه ویرایش این توزیع را ندارید") # noqa
 
             for dist in tag_batch.distributions.all():
                 dist.tag.all().update(
@@ -266,7 +271,9 @@
             parent_tag_distribution = TagDistribution.objects.get(
                 id=dist_data['parent_tag_distribution']
             )
-            batch = TagBatch.objects.get(batch_identity=dist_data.get('batch_identity'))
+            batch = TagBatch.objects.get(
+                batch_identity=dist_data.get('batch_identity')
+            ) if dist_data.get('batch_identity') else None
             count = dist_data['count']
 
             tags = Tag.objects.filter(
@@ -1,3 +1,5 @@
|
|||||||
|
from django.db.models import Sum
|
||||||
|
from django.db.models.functions import Coalesce
|
||||||
from django.db.models.signals import m2m_changed
|
from django.db.models.signals import m2m_changed
|
||||||
from django.db.models.signals import post_save
|
from django.db.models.signals import post_save
|
||||||
from django.dispatch import receiver
|
from django.dispatch import receiver
|
||||||
@@ -15,14 +17,19 @@ def update_batch_on_distribution_change(
     if not instance.batch:
         return
 
+    if instance.parent:
+        return
+
     batch = instance.batch
 
     distributions = TagDistribution.objects.filter(batch=batch)
 
     distributed_tags = Tag.objects.filter(
-        distributions__batch=batch
+        distributions__batch=batch,
+        status__in=['R', 'A'],
     ).distinct().count()
 
+    print("distributed_tags", distributed_tags)
     batch.total_distributed_tags = distributed_tags
     batch.total_remaining_tags = (
         int(batch.request_number) - distributed_tags
@@ -53,121 +60,16 @@ def calculate_tag_distribution_detail(sender, instance: TagDistributionBatch, **
     tag_dist_batch = instance
     parent = tag_dist_batch.parent
     if parent:
-        # parent.total_distributed_tag_count += tag_dist_batch.total_tag_count
-        parent.remaining_tag_count = 20
-        print(parent.remaining_tag_count)
-        parent.save(update_fields=['remaining_tag_count'])
-
+        parent.total_distributed_tag_count = parent.children.aggregate(
+            total=Coalesce(Sum('total_tag_count'), 0)
+        )['total']
+        parent.remaining_tag_count = (
+            parent.total_tag_count - parent.total_distributed_tag_count
+        )
+        parent.parent_flag = True
+        parent.save(update_fields=['remaining_tag_count', 'total_distributed_tag_count'])
+    if not getattr(instance, 'parent_flag', False):
         tag_dist_batch.remaining_tag_count = tag_dist_batch.total_tag_count
         instance.flag = True
         tag_dist_batch.save(update_fields=['remaining_tag_count'])
 
-
-# @receiver(m2m_changed, sender=TagDistribution.tag.through)
-# def on_tags_added_to_distribution(sender, instance, action, pk_set, **kwargs):
-#     if action != 'post_add':
-#         return
-#
-#     if not pk_set:
-#         return
-#
-#     with transaction.atomic():
-#
-#         Tag.objects.filter(
-#             id__in=pk_set
-#         ).update(
-#             status='R',
-#             organization=instance.assigned_org
-#         )
-#
-#         total = instance.tag.count()
-#
-#         instance.total_tag_count = total
-#         instance.distributed_number = total - instance.remaining_number
-#         instance.remaining_number = total - instance.distributed_number
-#         instance.save(update_fields=[
-#             'total_tag_count',
-#             'distributed_number',
-#             'remaining_number'
-#         ])
-#
-#         if instance.batch:
-#             batch = instance.batch
-#
-#             distributed_tags = Tag.objects.filter(
-#                 batches=batch,
-#                 status__in=['R', 'A']
-#             ).count()
-#
-#             total_tags = batch.tag.count()
-#
-#             batch.total_distributed_tags = distributed_tags
-#             batch.total_remaining_tags = total_tags - distributed_tags
-#             batch.status = (
-#                 'distributed'
-#                 if batch.total_remaining_tags == 0
-#                 else 'created'
-#             )
-#
-#             batch.save(update_fields=[
-#                 'total_distributed_tags',
-#                 'total_remaining_tags',
-#                 'status'
-#             ])
-#
-#
-# @receiver(m2m_changed, sender=TagDistribution.tag.through)
-# def on_tags_removed_from_distribution(sender, instance, action, pk_set, **kwargs):
-#     if action not in ['post_remove', 'post_clear']:
-#         return
-#
-#     if action == 'post_clear':
-#         pk_set = list(instance.tag.values_list('id', flat=True))
-#
-#     if not pk_set:
-#         return
-#
-#     with transaction.atomic():
-#
-#         Tag.objects.filter(id__in=pk_set).update(
-#             status='R',
-#             organization=instance.assigner_org
-#         )
-#
-#         total = instance.tag.count()
-#         instance.total_tag_count = total
-#         instance.distributed_number = total
-#         instance.remaining_number = 0
-#         instance.save(update_fields=[
-#             'total_tag_count',
-#             'distributed_number',
-#             'remaining_number'
-#         ])
-#
-#         if instance.batch:
-#             batch = instance.batch
-#             distributed_tags = Tag.objects.filter(
-#                 batches=batch,
-#                 status__in=['R', 'A']
-#             ).count()
-#             total_tags = batch.tag.count()
-#             batch.total_distributed_tags = distributed_tags
-#             batch.total_remaining_tags = total_tags - distributed_tags
-#             batch.status = 'distributed' if batch.total_remaining_tags == 0 else 'created'
-#             batch.save(update_fields=[
-#                 'total_distributed_tags',
-#                 'total_remaining_tags',
-#                 'status'
-#             ])
-#
-#         for dist_batch in instance.tag_distribution_batch.all():
-#             total_dist = dist_batch.distributions.aggregate(
-#                 total=Count('tag')
-#             ).get('total', 0)
-#             dist_batch.total_distributed_tag_count = total_dist
-#             dist_batch.remaining_tag_count = dist_batch.total_tag_count - total_dist
-#             dist_batch.is_closed = dist_batch.remaining_tag_count == 0
-#             dist_batch.save(update_fields=[
-#                 'total_distributed_tag_count',
-#                 'remaining_tag_count',
-#                 'is_closed'
-#             ])
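Note: the parent roll-up introduced above, isolated as a plain helper for readability; child batch totals are summed (defaulting to 0 when there are no children) and the remainder is derived from the parent's own total_tag_count. The helper name is illustrative.

```python
from django.db.models import Sum
from django.db.models.functions import Coalesce


def refresh_parent_counts(parent):
    """Recompute distributed/remaining counters from the parent's child batches."""
    parent.total_distributed_tag_count = parent.children.aggregate(
        total=Coalesce(Sum('total_tag_count'), 0)
    )['total']
    parent.remaining_tag_count = parent.total_tag_count - parent.total_distributed_tag_count
    parent.save(update_fields=['total_distributed_tag_count', 'remaining_tag_count'])
```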
apps/tag/templates/pdf/tag_distribution.html (new file, +92)
@@ -0,0 +1,92 @@
+<!DOCTYPE html>
+<html lang="fa" dir="rtl">
+<head>
+    <meta charset="UTF-8">
+    <style>
+        @page {
+            size: A4;
+            margin: 2cm;
+        }
+
+        body {
+            font-family: DejaVu Sans;
+            font-size: 12px;
+        }
+
+        h1 {
+            text-align: center;
+            margin-bottom: 20px;
+        }
+
+        .meta {
+            margin-bottom: 20px;
+        }
+
+        .meta div {
+            margin-bottom: 5px;
+        }
+
+        table {
+            width: 100%;
+            border-collapse: collapse;
+        }
+
+        th, td {
+            border: 1px solid #333;
+            padding: 6px;
+            text-align: center;
+        }
+
+        th {
+            background: #f0f0f0;
+        }
+
+        .footer {
+            margin-top: 30px;
+            font-size: 10px;
+            text-align: center;
+            color: #666;
+        }
+    </style>
+</head>
+<body>
+
+<h1>سند توزیع پلاک دام</h1>
+
+<div class="meta">
+    <div><strong>شناسه توزیع:</strong> {{ batch.dist_batch_identity }}</div>
+    <div><strong>سازمان تخصیصدهنده:</strong> {{ batch.assigner_org.name }}</div>
+    <div><strong>سازمان دریافتکننده:</strong> {{ batch.assigned_org.name }}</div>
+    <div><strong>تاریخ ایجاد:</strong> {{ batch.create_date }}</div>
+    <div><strong>تعداد کل پلاک:</strong> {{ batch.total_tag_count }}</div>
+</div>
+
+<table>
+    <thead>
+    <tr>
+        <th>کد گونه</th>
+        <th>از سریال</th>
+        <th>تا سریال</th>
+        <th>تعداد کل</th>
+        <th>باقیمانده</th>
+    </tr>
+    </thead>
+    <tbody>
+    {% for dist in batch.distributions.all %}
+    <tr>
+        <td>{{ dist.species_code }}</td>
+        <td>{{ dist.serial_from }}</td>
+        <td>{{ dist.serial_to }}</td>
+        <td>{{ dist.total_tag_count }}</td>
+        <td>{{ dist.remaining_number }}</td>
+    </tr>
+    {% endfor %}
+    </tbody>
+</table>

+<div class="footer">
+    این سند به صورت سیستمی تولید شده و معتبر میباشد.
+</div>
+
+</body>
+</html>
@@ -1,3 +1,4 @@
+import random
 import typing
 
 from django.db import transaction
@@ -21,6 +22,7 @@ from apps.tag.services.tag_distribution_services import TagDistributionService
 from apps.tag.services.tag_services import TagService
 from common.helpers import get_organization_by_user
 from common.liara_tools import upload_to_liara
+from common.storage import upload_to_storage
 from .serializers import (
     TagSerializer,
     TagAssignmentSerializer,
@@ -28,6 +30,9 @@ from .serializers import (
 )
 
 
+# from weasyprint import HTML
+
+
 class TagViewSet(BaseViewSet, TagService, SoftDeleteMixin, DynamicSearchMixin, viewsets.ModelViewSet):
     """ Tag View Set """
     queryset = tag_models.Tag.objects.all()
@@ -297,8 +302,8 @@ class TagAssignmentViewSet(BaseViewSet, SoftDeleteMixin, DynamicSearchMixin, vie
         check_response = GeneralOTPViewSet().check_otp(request)
         if check_response.status_code == 200:
             return Response(check_response.status_code, status=status.HTTP_200_OK)
-        else:
             return Response(check_response.status_code, status=status.HTTP_403_FORBIDDEN)
+        return Response(status=status.HTTP_200_OK)
 
 
 class AllocatedTagsViewSet(SoftDeleteMixin, viewsets.ModelViewSet):
@@ -478,9 +483,9 @@ class TagDistributionViewSet(
     @action(
         methods=['put'],
         detail=True,
-        url_path='distribute_distribution',
-        url_name='distribute_distribution',
-        name='distribute_distribution',
+        url_path='edit_distribute_distribution',
+        url_name='edit_distribute_distribution',
+        name='edit_distribute_distribution',
     )
     def update_distribute_from_distribution(self, request, pk=None):
         """
@@ -558,9 +563,9 @@
 
 class TagDistributionBatchViewSet(
     BaseViewSet,
+    viewsets.ModelViewSet,
     SoftDeleteMixin,
     DynamicSearchMixin,
-    viewsets.ModelViewSet,
     TagDistributionService
 ):
     queryset = tag_models.TagDistributionBatch.objects.all()
@@ -578,8 +583,23 @@
         """
         list of tag distribution batches
         """
+        org = get_organization_by_user(request.user)
 
-        queryset = self.get_queryset(visibility_by_org_scope=True).filter(is_closed=False).order_by('-create_date')
+        queryset = self.get_queryset(
+            visibility_by_org_scope=True
+        ).filter(
+            is_closed=False,
+            top_root_distribution=True,
+        ).order_by('-create_date')
+
+        if not queryset:
+            queryset = self.get_queryset(
+                visibility_by_org_scope=True
+            ).filter(
+                is_closed=False,
+                owner_org=org,
+                top_root_distribution=False,
+            ).order_by('-create_date')
 
         queryset = self.filter_query(self.filter_queryset(queryset))
 
@@ -598,6 +618,26 @@
             serializer = self.serializer_class(distribution_batch)
             return Response(serializer.data, status=status.HTTP_200_OK)
 
+    @action(
+        methods=['get'],
+        detail=True,
+        url_path='child_list',
+        url_name='child_list',
+        name='child_list'
+    )
+    def child_list(self, request, pk=None):
+        """
+        list of all child from a tag distribution batch
+        """
+        dist_batch = self.get_object()
+        queryset = dist_batch.children.all()
+
+        page = self.paginate_queryset(queryset)
+        if page is not None: # noqa
+            serializer = self.get_serializer(page, many=True)
+            return self.get_paginated_response(serializer.data)
+        return Response(self.serializer_class(queryset).data)
+
     @action(
         methods=['post'],
         detail=True,
@@ -612,6 +652,8 @@
         dist_batch.is_closed = True
         dist_batch.save()
         dist_batch.distributions.all().update(is_closed=True) # close distributions of batch
+        for distribute in dist_batch.distributions.all():
+            distribute.tag.all().update(status='F')
 
         return Response(status=status.HTTP_200_OK)
 
@@ -670,3 +712,93 @@
         dashboard_data = self.distribution_batch_main_dashboard(org=org, is_closed=params.get('is_closed'))
 
         return Response(dashboard_data, status=status.HTTP_200_OK)
+
+    # @action(
+    #     methods=['get'],
+    #     detail=True,
+    #     url_path='distribution_pdf_view',
+    #     url_name='distribution_pdf_view',
+    #     name='distribution_pdf_view',
+    # )
+    # def distribution_pdf_view(self, request, pk=None):
+    #     batch = tag_models.TagDistributionBatch.objects.select_related(
+    #         'assigner_org', 'assigned_org'
+    #     ).prefetch_related('distributions').get(id=pk)
+    #
+    #     html_string = render_to_string(
+    #         'pdf/tag_distribution.html', # noqa
+    #         {'batch': batch}
+    #     )
+    #
+    #     html = HTML(
+    #         string=html_string,
+    #         base_url=request.build_absolute_uri('/')
+    #     )
+    #
+    #     pdf = html.write_pdf()
+    #
+    #     response = HttpResponse(pdf, content_type='application/pdf')
+    #     response['Content-Disposition'] = (
+    #         f'inline; filename="distribution_{batch.dist_batch_identity}.pdf"'
+    #     )
+    #
+    #     return response
+
+    @action(
+        methods=['post', ],
+        detail=True,
+        url_name='assign_document',
+        url_path='assign_document',
+        name='assign_document'
+    )
+    @transaction.atomic
+    def assign_document(self, request, pk=None):
+        """ set document for tag assignment """
+
+        # get tag assignment object & set document url
+        dist_batch = self.queryset.get(id=pk)
+
+        # upload document file to liara storage
+        document = request.FILES.get('dist_exit_document')
+        document_url = upload_to_storage(
+            document,
+            f'{random.randint(1000, 9999)}_distribution_batch_document.{str(document).split(".")[1]}'
+        )
+        dist_batch.warehouse_exit_doc = document_url
+        dist_batch.save(update_fields=['warehouse_exit_doc'])
+        serializer = self.serializer_class(dist_batch)
+        return Response(serializer.data, status=status.HTTP_200_OK)
+
+    @action(
+        methods=['post'],
+        detail=True,
+        url_path='accept_exit_doc',
+        url_name='accept_exit_doc',
+        name='accept_exit_doc',
+    )
+    def accept_exit_doc(self, request, pk=None):
+        """
+        accept exit document from warehouse on distribution batch
+        """
+
+        dist_batch = self.get_object()
+        dist_batch.exit_doc_status = True
+        dist_batch.save(update_fields=['exit_doc_status'])
+
+        return Response(status=status.HTTP_200_OK)
+
+    def destroy(self, request, pk=None, *args, **kwargs):
+        """
+        delete tag distribution batch and free their tag from distribute
+        """
+
+        dist_batch = self.get_object()
+
+        for distribute in dist_batch.distributions.all():
+            distribute.tag.all().update(status='F')
+            distribute.tag.clear()
+            distribute.soft_delete()
+
+        dist_batch.soft_delete()
+
+        return Response(status=status.HTTP_200_OK)
@@ -281,8 +281,14 @@ class InventoryQuotaSaleTransactionViewSet(
 
             org = get_organization_by_user(request.user)
             if org.free_visibility_by_scope:
+
                 tr_objects = self.get_queryset(visibility_by_org_scope=True)
-                tr_item_objects = InventoryQuotaSaleItemViewSet().get_queryset(visibility_by_org_scope=True)
+
+                tr_item_view = InventoryQuotaSaleItemViewSet()
+                tr_item_view.request = request
+                tr_item_view.kwargs = {'pk': None}
+                tr_item_objects = tr_item_view.get_queryset(visibility_by_org_scope=True)
+
             transaction_dashboard_data = self.get_dashboard(
                 org,
                 free_visibility_tr_objects=tr_objects,
@@ -1,4 +1,31 @@
+import boto3
+from botocore.exceptions import NoCredentialsError
+
 STORAGE_ENDPOINT = 'https://s3.rasadyar.com/rasaddam'
 STORAGE_BUCKET_NAME = 'ticket-rasadyar'
 STORAGE_ACCESS_KEY = "zG3ewsbYsTqCmuws"
 STORAGE_SECRET_KEY = 'RInUMB78zlQZp6CNf8+sRoSh2cNDHcGQhXrLnTJ1AuI='
+
+
+def upload_to_storage(file_obj, file_name):
+    try:
+        s3 = boto3.client(
+            's3',
+            endpoint_url=STORAGE_ENDPOINT,
+            aws_access_key_id=STORAGE_ACCESS_KEY,
+            aws_secret_access_key=STORAGE_SECRET_KEY
+        )
+
+        s3.upload_fileobj(
+            file_obj,
+            STORAGE_BUCKET_NAME,
+            file_name,
+            ExtraArgs={'ACL': 'public-read'} # دسترسی عمومی
+        )
+
+        return f"{STORAGE_ENDPOINT}/{STORAGE_BUCKET_NAME}/{file_name}"
+
+    except NoCredentialsError:
+        raise Exception("اعتبارنامههای AWS معتبر نیستند")
+    except Exception as e:
+        raise Exception(f"خطا در آپلود فایل: {e}")
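Note: a rough usage sketch of the new `upload_to_storage` helper, mirroring how `assign_document` calls it in the view changes above. The wrapper function name is illustrative, and the extension handling here uses `rsplit` as a small variation that tolerates extra dots in the filename.

```python
import random

from common.storage import upload_to_storage


def save_exit_document(request, dist_batch):
    """Upload the exit document from the request and store its public URL."""
    document = request.FILES.get('dist_exit_document')
    extension = str(document).rsplit('.', 1)[-1]
    url = upload_to_storage(
        document,
        f'{random.randint(1000, 9999)}_distribution_batch_document.{extension}'
    )
    dist_batch.warehouse_exit_doc = url
    dist_batch.save(update_fields=['warehouse_exit_doc'])
    return url
```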
@@ -84,3 +84,4 @@ channels_redis
 daphne
 django-jazzmin
 python-dotenv
+weasyprint