Compare commits
9 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 9a3bd413d6 | |||
| 2fa346e084 | |||
| ecd3e45632 | |||
| 48ec2df10d | |||
| bc61eeacd3 | |||
| a051071a1b | |||
| a0b939ccd7 | |||
| 0fc8fac5aa | |||
| 220c15b3fa |
24 brewman/alembic/versions/48af31eb6f3f_fp.py (new file)
@ -0,0 +1,24 @@
|
||||
"""FP
|
||||
|
||||
Revision ID: 48af31eb6f3f
|
||||
Revises: 12262aadbc08
|
||||
Create Date: 2023-08-07 13:01:05.401492
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '48af31eb6f3f'
|
||||
down_revision = '12262aadbc08'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
    """Apply this revision: enforce one fingerprint row per employee per date.

    Adds a unique constraint over (date, employee_id) on the
    ``fingerprints`` table; the constraint name is produced via ``op.f``
    so it follows the project's naming convention.
    """
    constraint_name = op.f('uq_fingerprints_date')
    columns = ['date', 'employee_id']
    op.create_unique_constraint(constraint_name, 'fingerprints', columns)
|
||||
|
||||
|
||||
def downgrade():
    """Revert this revision: drop the (date, employee_id) unique constraint.

    Removes the unique constraint added by :func:`upgrade` from the
    ``fingerprints`` table.
    """
    constraint_name = op.f('uq_fingerprints_date')
    op.drop_constraint(constraint_name, 'fingerprints', type_='unique')
|
||||
@ -1 +1 @@
|
||||
__version__ = "11.1.6"
|
||||
__version__ = "11.2.0"
|
||||
|
||||
@ -3,7 +3,7 @@ import uuid
|
||||
from datetime import datetime
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from sqlalchemy import DateTime, ForeignKey, Uuid
|
||||
from sqlalchemy import DateTime, ForeignKey, UniqueConstraint, Uuid
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from ..db.base_class import reg
|
||||
@ -16,6 +16,7 @@ if TYPE_CHECKING:
|
||||
@reg.mapped_as_dataclass(unsafe_hash=True)
|
||||
class Fingerprint:
|
||||
__tablename__ = "fingerprints"
|
||||
__table_args__ = (UniqueConstraint("date", "employee_id"),)
|
||||
|
||||
id: Mapped[uuid.UUID] = mapped_column(Uuid, primary_key=True, insert_default=uuid.uuid4)
|
||||
employee_id: Mapped[uuid.UUID] = mapped_column(Uuid, ForeignKey("employees.id"), nullable=False)
|
||||
|
||||
@ -8,7 +8,7 @@ from io import StringIO
|
||||
import brewman.schemas.fingerprint as schemas
|
||||
|
||||
from fastapi import APIRouter, Depends, File, HTTPException, UploadFile, status
|
||||
from sqlalchemy import bindparam, select
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.dialects.postgresql import insert as pg_insert
|
||||
from sqlalchemy.exc import SQLAlchemyError
|
||||
from sqlalchemy.orm import Session
|
||||
@ -36,22 +36,13 @@ def upload_prints(
|
||||
for id_, code in db.execute(select(Employee.id, Employee.code)).all():
|
||||
employees[code] = id_
|
||||
file_data = read_file(fingerprints)
|
||||
prints = [d for d in fp(file_data, employees) if start <= d.date_.date() <= finish]
|
||||
prints = [d.model_dump() for d in fp(file_data, employees) if start <= d.date_.date() <= finish]
|
||||
for p in prints:
|
||||
p["id"] = p.pop("id_")
|
||||
paged_data = [prints[i : i + 100] for i in range(0, len(prints), 100)]
|
||||
for i, page in enumerate(paged_data):
|
||||
print(f"Processing page {i} of {len(paged_data)}")
|
||||
db.execute(
|
||||
pg_insert(Fingerprint)
|
||||
.values(
|
||||
{
|
||||
"id": bindparam("id"),
|
||||
"employee_id": bindparam("employee_id"),
|
||||
"date": bindparam("date"),
|
||||
}
|
||||
)
|
||||
.on_conflict_do_nothing(),
|
||||
[p.dict() for p in page],
|
||||
)
|
||||
db.execute(pg_insert(Fingerprint).on_conflict_do_nothing(), page)
|
||||
db.commit()
|
||||
except SQLAlchemyError as e:
|
||||
raise HTTPException(
|
||||
|
||||
@ -44,11 +44,13 @@ class ClientList(Client):
|
||||
|
||||
@field_validator("last_date", mode="before")
|
||||
@classmethod
|
||||
def parse_last_date(cls, value: datetime | str) -> datetime | None:
|
||||
def parse_last_date(cls, value: datetime | str | None) -> datetime | None:
|
||||
if value is None or value == "":
|
||||
return None
|
||||
if isinstance(value, datetime):
|
||||
return value
|
||||
return datetime.strptime(value, "%d-%b-%Y %H:%M")
|
||||
|
||||
@field_serializer("last_date")
|
||||
def serialize_last_date(self, value: datetime, info: FieldSerializationInfo) -> str:
|
||||
return value.strftime("%d-%b-%Y %H:%M")
|
||||
def serialize_last_date(self, value: datetime | None, info: FieldSerializationInfo) -> str | None:
|
||||
return None if value is None else value.strftime("%d-%b-%Y %H:%M")
|
||||
|
||||
@ -20,11 +20,11 @@ class Fingerprint(BaseModel):
|
||||
|
||||
@field_validator("date_", mode="before")
|
||||
@classmethod
|
||||
def parse_date(cls, value: date | str) -> date:
|
||||
if isinstance(value, date):
|
||||
def parse_date(cls, value: datetime | str) -> datetime:
|
||||
if isinstance(value, datetime):
|
||||
return value
|
||||
return datetime.strptime(value, "%d-%b-%Y").date()
|
||||
return datetime.strptime(value, "%d-%b-%Y %H:%M")
|
||||
|
||||
@field_serializer("date_")
|
||||
def serialize_date(self, value: date, info: FieldSerializationInfo) -> str:
|
||||
return value.strftime("%d-%b-%Y")
|
||||
return value.strftime("%d-%b-%Y %H:%M")
|
||||
|
||||
@ -1,6 +1,6 @@
|
||||
[tool.poetry]
|
||||
name = "brewman"
|
||||
version = "11.1.6"
|
||||
version = "11.2.0"
|
||||
description = "Accounting plus inventory management for a restaurant."
|
||||
authors = ["tanshu <git@tanshu.com>"]
|
||||
|
||||
|
||||
@ -32,5 +32,5 @@ else
|
||||
echo "No version bump"
|
||||
fi
|
||||
cd "$parent_path/docker" || exit
|
||||
docker save brewman:latest | bzip2 | pv | ssh beacon 'bunzip2 | sudo docker load'
|
||||
docker save brewman:latest | bzip2 | pv | ssh gondor 'bunzip2 | sudo docker load'
|
||||
ansible-playbook --inventory hosts playbook.yml
|
||||
|
||||
@ -1,7 +1,5 @@
|
||||
---
|
||||
name: acc
|
||||
# host_directory: "brewman-{{ name }}"
|
||||
# db_name: "brewman_{{ name }}"
|
||||
|
||||
http_host: "acc.hopsngrains.com"
|
||||
http_conf: "acc.hopsngrains.com.conf"
|
||||
|
||||
@ -1,7 +1,5 @@
|
||||
---
|
||||
name: exp
|
||||
# host_directory: "brewman-{{ name }}"
|
||||
# db_name: "brewman_{{ name }}"
|
||||
|
||||
http_host: "exp.tanshu.com"
|
||||
http_conf: "exp.tanshu.com.conf"
|
||||
|
||||
@ -1,7 +1,5 @@
|
||||
---
|
||||
name: hinchco
|
||||
# host_directory: "brewman-{{ name }}"
|
||||
# db_name: "brewman_{{ name }}"
|
||||
|
||||
http_host: "acc.hinchco.in"
|
||||
http_conf: "acc.hinchco.in.conf"
|
||||
|
||||
@ -1,7 +1,5 @@
|
||||
---
|
||||
name: hops
|
||||
# host_directory: "brewman-{{ name }}"
|
||||
# db_name: "brewman_{{ name }}"
|
||||
|
||||
http_host: "hops.hopsngrains.com"
|
||||
http_conf: "hops.hopsngrains.com.conf"
|
||||
|
||||
@ -1,7 +1,5 @@
|
||||
---
|
||||
name: mhl
|
||||
# host_directory: "brewman-{{ name }}"
|
||||
# db_name: "brewman_{{ name }}"
|
||||
|
||||
http_host: "mhl.hopsngrains.com"
|
||||
http_conf: "mhl.hopsngrains.com.conf"
|
||||
|
||||
@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "overlord",
|
||||
"version": "11.1.6",
|
||||
"version": "11.2.0",
|
||||
"scripts": {
|
||||
"ng": "ng",
|
||||
"start": "ng serve",
|
||||
|
||||
@ -193,10 +193,10 @@ export class IssueComponent implements OnInit, AfterViewInit, OnDestroy {
|
||||
this.voucher.inventories.push(
|
||||
new Inventory({
|
||||
quantity,
|
||||
rate: this.batch.rate,
|
||||
tax: this.batch.tax,
|
||||
discount: this.batch.discount,
|
||||
amount: quantity * this.batch.rate * (1 + this.batch.tax) * (1 - this.batch.discount),
|
||||
rate: +this.batch.rate,
|
||||
tax: +this.batch.tax,
|
||||
discount: +this.batch.discount,
|
||||
amount: quantity * +this.batch.rate * (1 + +this.batch.tax) * (1 - +this.batch.discount),
|
||||
batch: this.batch,
|
||||
}),
|
||||
);
|
||||
|
||||
@ -2,5 +2,5 @@ export const environment = {
|
||||
production: true,
|
||||
// eslint-disable-next-line @typescript-eslint/naming-convention
|
||||
ACCESS_TOKEN_REFRESH_MINUTES: 10, // refresh token 10 minutes before expiry
|
||||
version: '11.1.6',
|
||||
version: '11.2.0',
|
||||
};
|
||||
|
||||
@ -6,7 +6,7 @@ export const environment = {
|
||||
production: false,
|
||||
// eslint-disable-next-line @typescript-eslint/naming-convention
|
||||
ACCESS_TOKEN_REFRESH_MINUTES: 10, // refresh token 10 minutes before expiry
|
||||
version: '11.1.6',
|
||||
version: '11.2.0',
|
||||
};
|
||||
|
||||
/*
|
||||
|
||||
Reference in New Issue
Block a user