views.py

from rest_framework import viewsets, views
import pandas as pd
import numpy as np
from utils.datasolve import data_validate
from utils.datasolve import is_number
from utils.md5 import Md5
from goods.models import ListModel as goodslist
from goodsunit.models import ListModel as goodsunit
from goodsclass.models import ListModel as goodsclass
from goodsbrand.models import ListModel as goodsbrand
from goodscolor.models import ListModel as goodscolor
from goodsshape.models import ListModel as goodsshape
from goodsspecs.models import ListModel as goodsspecs
from goodsorigin.models import ListModel as goodsorigin
from goods import files as goodsfiles
from supplier.models import ListModel as supplier
from supplier import files as supplierfiles
from customer.models import ListModel as customer
from customer import files as customerfiles
from payment.models import TransportationFeeListModel as freight
from capital.models import ListModel as capital
from scanner.models import ListModel as scanner
from rest_framework.response import Response
from rest_framework.exceptions import APIException
from staff.models import ListModel as staff
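
# Note: the upload endpoints below share the same request contract: a multipart
# upload with the spreadsheet in the 'file' field, the operating staff id in an
# 'operator' request header (read via HTTP_OPERATOR) and, where translated column
# headers are supported, a 'language' header (read via HTTP_LANGUAGE). Only
# .xlsx, .xls and .csv files are accepted. URL routing lives elsewhere in the
# project and is not shown here.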


class GoodlistfileViewSet(views.APIView):
    """
    create:
        Upload One Excel(post)
    """
    pagination_class = []

    def get_queryset(self):
        if self.request.user:
            return goodslist.objects.filter(openid=self.request.auth.openid)
        else:
            return goodslist.objects.filter().none()

    def get_lang(self):
        if self.request.user:
            lang = self.request.META.get('HTTP_LANGUAGE')
        else:
            lang = 'en-US'
        if lang == 'zh-hans':
            data_header = goodsfiles.cn_data_header()
        elif lang == 'en-US':
            data_header = goodsfiles.en_data_header()
        else:
            data_header = goodsfiles.en_data_header()
        return data_header
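
    # Expected spreadsheet layout for the goods upload (columns are read by
    # position, matching the create() call below): 0 goods_code, 1 goods_desc,
    # 2 goods_supplier, 3 goods_weight, 4 goods_w, 5 goods_d, 6 goods_h,
    # 7 unit_volume, 8 goods_unit, 9 goods_class, 10 goods_brand, 11 goods_color,
    # 12 goods_shape, 13 goods_specs, 14 goods_origin, 15 goods_cost, 16 goods_price.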
    def post(self, request, *args, **kwargs):
        data_header = self.get_lang()
        files = self.request.FILES.get('file')
        if files:
            excel_type = files.name.split('.')[-1]  # last suffix, so filenames containing dots are handled
            staff_name = staff.objects.filter(openid=self.request.auth.openid,
                                              id=self.request.META.get('HTTP_OPERATOR')).first().staff_name
            if excel_type in ['xlsx', 'xls', 'csv']:
                self.get_queryset().delete()
                # scope the reset to the current tenant, as the goods and scanner deletes above/below do
                goodsunit.objects.filter(openid=self.request.auth.openid).delete()
                goodsclass.objects.filter(openid=self.request.auth.openid).delete()
                goodsbrand.objects.filter(openid=self.request.auth.openid).delete()
                goodscolor.objects.filter(openid=self.request.auth.openid).delete()
                goodsshape.objects.filter(openid=self.request.auth.openid).delete()
                goodsspecs.objects.filter(openid=self.request.auth.openid).delete()
                goodsorigin.objects.filter(openid=self.request.auth.openid).delete()
                scanner.objects.filter(openid=self.request.auth.openid, mode='GOODS').delete()
                if excel_type == 'csv':
                    df = pd.read_csv(files)
                else:
                    df = pd.read_excel(files)
                df.drop_duplicates(keep='first', inplace=True)
                data_list = df.drop_duplicates(subset=[data_header.get('goods_code')], keep='first').values
                for d in range(len(data_list)):
                    data_validate(str(data_list[d]))
                for i in range(len(data_list)):
                    if str(data_list[i][0]) == 'nan':
                        continue
                    else:
                        if str(data_list[i][1]) == 'nan':
                            data_list[i][1] = 'N/A'
                        if str(data_list[i][2]) == 'nan':
                            data_list[i][2] = 'N/A'
                        if is_number(str(data_list[i][3])):
                            if str(data_list[i][3]) == 'nan':
                                data_list[i][3] = 0
                        else:
                            data_list[i][3] = 0
                        if is_number(str(data_list[i][4])):
                            if str(data_list[i][4]) == 'nan':
                                data_list[i][4] = 0
                        else:
                            data_list[i][4] = 0
                        if is_number(str(data_list[i][5])):
                            if str(data_list[i][5]) == 'nan':
                                data_list[i][5] = 0
                        else:
                            data_list[i][5] = 0
                        if is_number(str(data_list[i][6])):
                            if str(data_list[i][6]) == 'nan':
                                data_list[i][6] = 0
                        else:
                            data_list[i][6] = 0
                        if is_number(str(data_list[i][7])):
                            if str(data_list[i][7]) == 'nan':
                                data_list[i][7] = 0
                        else:
                            data_list[i][7] = 0
                        if str(data_list[i][8]) == 'nan':
                            data_list[i][8] = 'N/A'
                        if str(data_list[i][9]) == 'nan':
                            data_list[i][9] = 'N/A'
                        if str(data_list[i][10]) == 'nan':
                            data_list[i][10] = 'N/A'
                        if str(data_list[i][11]) == 'nan':
                            data_list[i][11] = 'N/A'
                        if str(data_list[i][12]) == 'nan':
                            data_list[i][12] = 'N/A'
                        if str(data_list[i][13]) == 'nan':
                            data_list[i][13] = 'N/A'
                        if str(data_list[i][14]) == 'nan':
                            data_list[i][14] = 'N/A'
                        if is_number(str(data_list[i][15])):
                            if str(data_list[i][15]) == 'nan':
                                data_list[i][15] = 0
                        else:
                            data_list[i][15] = 0
                        if is_number(str(data_list[i][16])):
                            if str(data_list[i][16]) == 'nan':
                                data_list[i][16] = 0
                        else:
                            data_list[i][16] = 0
                        bar_code = Md5.md5(str(data_list[i][0]).strip())
                        goodslist.objects.create(openid=self.request.auth.openid,
                                                 goods_code=str(data_list[i][0]).strip(),
                                                 goods_desc=str(data_list[i][1]).strip(),
                                                 goods_supplier=str(data_list[i][2]).strip(),
                                                 goods_weight=data_list[i][3],
                                                 goods_w=data_list[i][4],
                                                 goods_d=data_list[i][5],
                                                 goods_h=data_list[i][6],
                                                 unit_volume=data_list[i][7],
                                                 goods_unit=str(data_list[i][8]).strip(),
                                                 goods_class=str(data_list[i][9]).strip(),
                                                 goods_brand=str(data_list[i][10]).strip(),
                                                 goods_color=str(data_list[i][11]).strip(),
                                                 goods_shape=str(data_list[i][12]).strip(),
                                                 goods_specs=str(data_list[i][13]).strip(),
                                                 goods_origin=str(data_list[i][14]).strip(),
                                                 goods_cost=data_list[i][15],
                                                 goods_price=data_list[i][16],
                                                 bar_code=bar_code,
                                                 creater=str(staff_name))
                        scanner.objects.create(openid=self.request.auth.openid, mode="GOODS",
                                               code=str(data_list[i][0]).strip(),
                                               bar_code=bar_code)
                goods_supplier_list = df.drop_duplicates(subset=[data_header.get('goods_supplier')],
                                                         keep='first').loc[:, data_header.get('goods_supplier')].values
                for i in goods_supplier_list:
                    if str(i) == 'nan':
                        i = 'N/A'
                    if supplier.objects.filter(openid=self.request.auth.openid,
                                               supplier_name=str(i).strip()).exists():
                        pass
                    else:
                        supplier.objects.create(openid=self.request.auth.openid,
                                                supplier_name=str(i).strip(),
                                                supplier_city="Supplier City",
                                                supplier_address="Supplier Address",
                                                supplier_contact="Supplier Contact",
                                                supplier_manager="Supplier Manager",
                                                creater=str(staff_name))
                goods_unit_list = df.drop_duplicates(subset=[data_header.get('goods_unit')],
                                                     keep='first').loc[:, data_header.get('goods_unit')].values
                for i in goods_unit_list:
                    if str(i) == 'nan':
                        i = 'N/A'
                    if goodsunit.objects.filter(openid=self.request.auth.openid,
                                                goods_unit=str(i).strip()).exists():
                        pass
                    else:
                        goodsunit.objects.create(openid=self.request.auth.openid,
                                                 goods_unit=str(i).strip(),
                                                 creater=str(staff_name))
                goods_class_list = df.drop_duplicates(subset=[data_header.get('goods_class')],
                                                      keep='first').loc[:, data_header.get('goods_class')].values
                for i in goods_class_list:
                    if str(i) == 'nan':
                        i = 'N/A'
                    if goodsclass.objects.filter(openid=self.request.auth.openid,
                                                 goods_class=str(i).strip()).exists():
                        pass
                    else:
                        goodsclass.objects.create(openid=self.request.auth.openid,
                                                  goods_class=str(i).strip(),
                                                  creater=str(staff_name))
                goods_brand_list = df.drop_duplicates(subset=[data_header.get('goods_brand')],
                                                      keep='first').loc[:, data_header.get('goods_brand')].values
                for i in goods_brand_list:
                    if str(i) == 'nan':
                        i = 'N/A'
                    if goodsbrand.objects.filter(openid=self.request.auth.openid,
                                                 goods_brand=str(i).strip()).exists():
                        pass
                    else:
                        goodsbrand.objects.create(openid=self.request.auth.openid,
                                                  goods_brand=str(i).strip(),
                                                  creater=str(staff_name))
                goods_color_list = df.drop_duplicates(subset=[data_header.get('goods_color')],
                                                      keep='first').loc[:, data_header.get('goods_color')].values
                for i in goods_color_list:
                    if str(i) == 'nan':
                        i = 'N/A'
                    if goodscolor.objects.filter(openid=self.request.auth.openid,
                                                 goods_color=str(i).strip()).exists():
                        pass
                    else:
                        goodscolor.objects.create(openid=self.request.auth.openid,
                                                  goods_color=str(i).strip(),
                                                  creater=str(staff_name))
                goods_shape_list = df.drop_duplicates(subset=[data_header.get('goods_shape')],
                                                      keep='first').loc[:, data_header.get('goods_shape')].values
                for i in goods_shape_list:
                    if str(i) == 'nan':
                        i = 'N/A'
                    if goodsshape.objects.filter(openid=self.request.auth.openid,
                                                 goods_shape=str(i).strip()).exists():
                        pass
                    else:
                        goodsshape.objects.create(openid=self.request.auth.openid,
                                                  goods_shape=str(i).strip(),
                                                  creater=str(staff_name))
                goods_specs_list = df.drop_duplicates(subset=[data_header.get('goods_specs')],
                                                      keep='first').loc[:, data_header.get('goods_specs')].values
                for i in goods_specs_list:
                    if str(i) == 'nan':
                        i = 'N/A'
                    if goodsspecs.objects.filter(openid=self.request.auth.openid,
                                                 goods_specs=str(i).strip()).exists():
                        pass
                    else:
                        goodsspecs.objects.create(openid=self.request.auth.openid,
                                                  goods_specs=str(i).strip(),
                                                  creater=str(staff_name))
                goods_origin_list = df.drop_duplicates(subset=[data_header.get('goods_origin')],
                                                       keep='first').loc[:, data_header.get('goods_origin')].values
                for i in goods_origin_list:
                    if str(i) == 'nan':
                        i = 'N/A'
                    if goodsorigin.objects.filter(openid=self.request.auth.openid,
                                                  goods_origin=str(i).strip()).exists():
                        pass
                    else:
                        goodsorigin.objects.create(openid=self.request.auth.openid,
                                                   goods_origin=str(i).strip(),
                                                   creater=str(staff_name))
            else:
                raise APIException({"detail": "Can Not Support This File Type"})
        else:
            raise APIException({"detail": "Please Select One File"})
        return Response({"detail": "success"})


class SupplierfileViewSet(views.APIView):
    """
    create:
        Upload One Excel(post)
    """
    pagination_class = []

    def get_queryset(self):
        if self.request.user:
            return supplier.objects.filter(openid=self.request.auth.openid)
        else:
            return supplier.objects.filter().none()

    def get_lang(self):
        if self.request.user:
            lang = self.request.META.get('HTTP_LANGUAGE')
        else:
            lang = 'en-US'
        if lang == 'zh-hans':
            data_header = supplierfiles.cn_data_header()
        elif lang == 'en-US':
            data_header = supplierfiles.en_data_header()
        else:
            data_header = supplierfiles.en_data_header()
        return data_header
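
    # Expected spreadsheet layout for the supplier upload (by column position):
    # 0 supplier_name, 1 supplier_city, 2 supplier_address, 3 supplier_contact,
    # 4 supplier_manager, 5 supplier_level.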
    def post(self, request, *args, **kwargs):
        data_header = self.get_lang()
        files = self.request.FILES.get('file')
        if files:
            excel_type = files.name.split('.')[-1]  # last suffix, so filenames containing dots are handled
            staff_name = staff.objects.filter(openid=self.request.auth.openid,
                                              id=self.request.META.get('HTTP_OPERATOR')).first().staff_name
            if excel_type in ['xlsx', 'xls', 'csv']:
                self.get_queryset().delete()
                if excel_type == 'csv':
                    df = pd.read_csv(files)
                else:
                    df = pd.read_excel(files)
                df.drop_duplicates(keep='first', inplace=True)
                data_list = df.drop_duplicates(subset=[data_header.get('supplier_name')], keep='first').values
                for d in range(len(data_list)):
                    data_validate(str(data_list[d]))
                for i in range(len(data_list)):
                    if str(data_list[i][0]) == 'nan':
                        continue
                    else:
                        if str(data_list[i][1]) == 'nan':
                            data_list[i][1] = 'N/A'
                        if str(data_list[i][2]) == 'nan':
                            data_list[i][2] = 'N/A'
                        if is_number(str(data_list[i][3])):
                            if str(data_list[i][3]) == 'nan':
                                data_list[i][3] = 0
                        else:
                            data_list[i][3] = 0
                        if str(data_list[i][4]) == 'nan':
                            data_list[i][4] = 'N/A'
                        if is_number(str(data_list[i][5])):
                            if str(data_list[i][5]) == 'nan':
                                data_list[i][5] = 0
                        else:
                            data_list[i][5] = 0
                        supplier.objects.create(openid=self.request.auth.openid,
                                                supplier_name=str(data_list[i][0]).strip(),
                                                supplier_city=str(data_list[i][1]).strip(),
                                                supplier_address=str(data_list[i][2]).strip(),
                                                supplier_contact=data_list[i][3],
                                                supplier_manager=str(data_list[i][4]).strip(),
                                                supplier_level=data_list[i][5],
                                                creater=str(staff_name))
            else:
                raise APIException({"detail": "Can Not Support This File Type"})
        else:
            raise APIException({"detail": "Please Select One File"})
        return Response({"detail": "success"})


class CustomerfileViewSet(views.APIView):
    """
    create:
        Upload One Excel(post)
    """
    pagination_class = []

    def get_queryset(self):
        if self.request.user:
            return customer.objects.filter(openid=self.request.auth.openid)
        else:
            return customer.objects.filter().none()

    def get_lang(self):
        if self.request.user:
            lang = self.request.META.get('HTTP_LANGUAGE')
        else:
            lang = 'en-US'
        if lang == 'zh-hans':
            data_header = customerfiles.cn_data_header()
        elif lang == 'en-US':
            data_header = customerfiles.en_data_header()
        else:
            data_header = customerfiles.en_data_header()
        return data_header
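
    # Expected spreadsheet layout for the customer upload (by column position):
    # 0 customer_name, 1 customer_city, 2 customer_address, 3 customer_contact,
    # 4 customer_manager, 5 customer_level.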
    def post(self, request, *args, **kwargs):
        data_header = self.get_lang()
        files = self.request.FILES.get('file')
        if files:
            excel_type = files.name.split('.')[-1]  # last suffix, so filenames containing dots are handled
            staff_name = staff.objects.filter(openid=self.request.auth.openid,
                                              id=self.request.META.get('HTTP_OPERATOR')).first().staff_name
            if excel_type in ['xlsx', 'xls', 'csv']:
                self.get_queryset().delete()
                if excel_type == 'csv':
                    df = pd.read_csv(files)
                else:
                    df = pd.read_excel(files)
                df.drop_duplicates(keep='first', inplace=True)
                data_list = df.drop_duplicates(subset=[data_header.get('customer_name')], keep='first').values
                for d in range(len(data_list)):
                    data_validate(str(data_list[d]))
                for i in range(len(data_list)):
                    if str(data_list[i][0]) == 'nan':
                        continue
                    else:
                        if str(data_list[i][1]) == 'nan':
                            data_list[i][1] = 'N/A'
                        if str(data_list[i][2]) == 'nan':
                            data_list[i][2] = 'N/A'
                        if is_number(str(data_list[i][3])):
                            if str(data_list[i][3]) == 'nan':
                                data_list[i][3] = 0
                        else:
                            data_list[i][3] = 0
                        if str(data_list[i][4]) == 'nan':
                            data_list[i][4] = 'N/A'
                        if is_number(str(data_list[i][5])):
                            if str(data_list[i][5]) == 'nan':
                                data_list[i][5] = 0
                        else:
                            data_list[i][5] = 0
                        customer.objects.create(openid=self.request.auth.openid,
                                                customer_name=str(data_list[i][0]).strip(),
                                                customer_city=str(data_list[i][1]).strip(),
                                                customer_address=str(data_list[i][2]).strip(),
                                                customer_contact=data_list[i][3],
                                                customer_manager=str(data_list[i][4]).strip(),
                                                customer_level=data_list[i][5],
                                                creater=str(staff_name))
            else:
                raise APIException({"detail": "Can Not Support This File Type"})
        else:
            raise APIException({"detail": "Please Select One File"})
        return Response({"detail": "success"})


class CapitalfileViewSet(views.APIView):
    """
    create:
        Upload One Excel(post)
    """
    pagination_class = []

    def get_queryset(self):
        if self.request.user:
            return capital.objects.filter(openid=self.request.auth.openid)
        else:
            return capital.objects.filter().none()
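
    # Expected spreadsheet layout for the capital upload (by column position):
    # 0 capital_name, 1 capital_qty, 2 capital_cost. No language-specific header
    # mapping is used here, so rows are read purely by position.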
    def post(self, request, *args, **kwargs):
        files = self.request.FILES.get('file')
        if files:
            excel_type = files.name.split('.')[-1]  # last suffix, so filenames containing dots are handled
            staff_name = staff.objects.filter(openid=self.request.auth.openid,
                                              id=self.request.META.get('HTTP_OPERATOR')).first().staff_name
            if excel_type in ['xlsx', 'xls', 'csv']:
                self.get_queryset().delete()
                if excel_type == 'csv':
                    df = pd.read_csv(files)
                else:
                    df = pd.read_excel(files)
                # drop_duplicates(inplace=True) returns None, so take the values from df afterwards
                df.drop_duplicates(keep='first', inplace=True)
                data_list = df.values
                for d in range(len(data_list)):
                    data_validate(str(data_list[d]))
                for i in range(len(data_list)):
                    if str(data_list[i][0]) == 'nan':
                        continue
                    else:
                        if is_number(str(data_list[i][1])):
                            if str(data_list[i][1]) == 'nan':
                                data_list[i][1] = 0
                        else:
                            data_list[i][1] = 0
                        if is_number(str(data_list[i][2])):
                            if str(data_list[i][2]) == 'nan':
                                data_list[i][2] = 0
                        else:
                            data_list[i][2] = 0
                        capital.objects.create(openid=self.request.auth.openid,
                                               capital_name=str(data_list[i][0]).strip(),
                                               capital_qty=data_list[i][1],
                                               capital_cost=data_list[i][2],
                                               creater=str(staff_name))
            else:
                raise APIException({"detail": "Can Not Support This File Type"})
        else:
            raise APIException({"detail": "Please Select One File"})
        return Response({"detail": "success"})


class FreightfileViewSet(views.APIView):
    """
    create:
        Upload One Excel(post)
    """
    pagination_class = []

    def get_queryset(self):
        if self.request.user:
            return freight.objects.filter(openid=self.request.auth.openid)
        else:
            return freight.objects.filter().none()
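
    # Expected spreadsheet layout for the freight upload (by column position):
    # 0 send_city, 1 receiver_city, 2 weight_fee, 3 volume_fee, 4 min_payment,
    # 5 transportation_supplier. Rows are read purely by position.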
    def post(self, request, *args, **kwargs):
        files = self.request.FILES.get('file')
        if files:
            excel_type = files.name.split('.')[-1]  # last suffix, so filenames containing dots are handled
            staff_name = staff.objects.filter(openid=self.request.auth.openid,
                                              id=self.request.META.get('HTTP_OPERATOR')).first().staff_name
            if excel_type in ['xlsx', 'xls', 'csv']:
                self.get_queryset().delete()
                if excel_type == 'csv':
                    df = pd.read_csv(files)
                else:
                    df = pd.read_excel(files)
                # drop_duplicates(inplace=True) returns None, so take the values from df afterwards
                df.drop_duplicates(keep='first', inplace=True)
                data_list = df.values
                for d in range(len(data_list)):
                    data_validate(str(data_list[d]))
                for i in range(len(data_list)):
                    if str(data_list[i][0]) == 'nan':
                        data_list[i][0] = 'N/A'
                    if str(data_list[i][1]) == 'nan':
                        data_list[i][1] = 'N/A'
                    if is_number(str(data_list[i][2])):
                        if str(data_list[i][2]) == 'nan':
                            data_list[i][2] = 0
                    else:
                        data_list[i][2] = 0
                    if is_number(str(data_list[i][3])):
                        if str(data_list[i][3]) == 'nan':
                            data_list[i][3] = 0
                    else:
                        data_list[i][3] = 0
                    if is_number(str(data_list[i][4])):
                        if str(data_list[i][4]) == 'nan':
                            data_list[i][4] = 0
                    else:
                        data_list[i][4] = 0
                    if str(data_list[i][5]) == 'nan':
                        data_list[i][5] = 'N/A'
                    freight.objects.create(openid=self.request.auth.openid,
                                           send_city=str(data_list[i][0]).strip(),
                                           receiver_city=str(data_list[i][1]).strip(),
                                           weight_fee=data_list[i][2],
                                           volume_fee=data_list[i][3],
                                           min_payment=data_list[i][4],
                                           transportation_supplier=str(data_list[i][5]).strip(),
                                           creater=str(staff_name))
            else:
                raise APIException({"detail": "Can Not Support This File Type"})
        else:
            raise APIException({"detail": "Please Select One File"})
        return Response({"detail": "success"})


class GoodlistfileAddViewSet(views.APIView):
    """
    create:
        Upload One Excel(post)
    """
    pagination_class = []

    def get_queryset(self):
        if self.request.user:
            return goodslist.objects.filter(openid=self.request.auth.openid)
        else:
            return goodslist.objects.filter().none()

    def get_lang(self):
        if self.request.user:
            lang = self.request.META.get('HTTP_LANGUAGE')
        else:
            lang = 'en-US'
        if lang == 'zh-hans':
            data_header = goodsfiles.cn_data_header()
        elif lang == 'en-US':
            data_header = goodsfiles.en_data_header()
        else:
            data_header = goodsfiles.en_data_header()
        return data_header
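
    # Same column layout as GoodlistfileViewSet above; the difference is that this
    # endpoint appends to the existing data and skips any goods_code that already
    # exists for the current openid instead of wiping the tables first.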
    def post(self, request, *args, **kwargs):
        data_header = self.get_lang()
        files = self.request.FILES.get('file')
        if files:
            excel_type = files.name.split('.')[-1]  # last suffix, so filenames containing dots are handled
            staff_name = staff.objects.filter(openid=self.request.auth.openid,
                                              id=self.request.META.get('HTTP_OPERATOR')).first().staff_name
            if excel_type in ['xlsx', 'xls', 'csv']:
                if excel_type == 'csv':
                    df = pd.read_csv(files)
                else:
                    df = pd.read_excel(files)
                df.drop_duplicates(keep='first', inplace=True)
                data_list = df.drop_duplicates(subset=[data_header.get('goods_code')], keep='first').values
                for d in range(len(data_list)):
                    data_validate(str(data_list[d]))
                for i in range(len(data_list)):
                    if str(data_list[i][0]) == 'nan':
                        continue
                    else:
                        if str(data_list[i][1]) == 'nan':
                            data_list[i][1] = 'N/A'
                        if str(data_list[i][2]) == 'nan':
                            data_list[i][2] = 'N/A'
                        if is_number(str(data_list[i][3])):
                            if str(data_list[i][3]) == 'nan':
                                data_list[i][3] = 0
                        else:
                            data_list[i][3] = 0
                        if is_number(str(data_list[i][4])):
                            if str(data_list[i][4]) == 'nan':
                                data_list[i][4] = 0
                        else:
                            data_list[i][4] = 0
                        if is_number(str(data_list[i][5])):
                            if str(data_list[i][5]) == 'nan':
                                data_list[i][5] = 0
                        else:
                            data_list[i][5] = 0
                        if is_number(str(data_list[i][6])):
                            if str(data_list[i][6]) == 'nan':
                                data_list[i][6] = 0
                        else:
                            data_list[i][6] = 0
                        if is_number(str(data_list[i][7])):
                            if str(data_list[i][7]) == 'nan':
                                data_list[i][7] = 0
                        else:
                            data_list[i][7] = 0
                        if str(data_list[i][8]) == 'nan':
                            data_list[i][8] = 'N/A'
                        if str(data_list[i][9]) == 'nan':
                            data_list[i][9] = 'N/A'
                        if str(data_list[i][10]) == 'nan':
                            data_list[i][10] = 'N/A'
                        if str(data_list[i][11]) == 'nan':
                            data_list[i][11] = 'N/A'
                        if str(data_list[i][12]) == 'nan':
                            data_list[i][12] = 'N/A'
                        if str(data_list[i][13]) == 'nan':
                            data_list[i][13] = 'N/A'
                        if str(data_list[i][14]) == 'nan':
                            data_list[i][14] = 'N/A'
                        if is_number(str(data_list[i][15])):
                            if str(data_list[i][15]) == 'nan':
                                data_list[i][15] = 0
                        else:
                            data_list[i][15] = 0
                        if is_number(str(data_list[i][16])):
                            if str(data_list[i][16]) == 'nan':
                                data_list[i][16] = 0
                        else:
                            data_list[i][16] = 0
                        if goodslist.objects.filter(openid=self.request.auth.openid,
                                                    goods_code=str(data_list[i][0]).strip()).exists():
                            pass
                        else:
                            bar_code = Md5.md5(str(data_list[i][0]).strip())
                            goodslist.objects.create(openid=self.request.auth.openid,
                                                     goods_code=str(data_list[i][0]).strip(),
                                                     goods_desc=str(data_list[i][1]).strip(),
                                                     goods_supplier=str(data_list[i][2]).strip(),
                                                     goods_weight=data_list[i][3],
                                                     goods_w=data_list[i][4],
                                                     goods_d=data_list[i][5],
                                                     goods_h=data_list[i][6],
                                                     unit_volume=data_list[i][7],
                                                     goods_unit=str(data_list[i][8]).strip(),
                                                     goods_class=str(data_list[i][9]).strip(),
                                                     goods_brand=str(data_list[i][10]).strip(),
                                                     goods_color=str(data_list[i][11]).strip(),
                                                     goods_shape=str(data_list[i][12]).strip(),
                                                     goods_specs=str(data_list[i][13]).strip(),
                                                     goods_origin=str(data_list[i][14]).strip(),
                                                     goods_cost=data_list[i][15],
                                                     goods_price=data_list[i][16],
                                                     bar_code=bar_code,
                                                     creater=str(staff_name))
                            scanner.objects.create(openid=self.request.auth.openid, mode="GOODS",
                                                   code=str(data_list[i][0]).strip(),
                                                   bar_code=bar_code)
                goods_supplier_list = df.drop_duplicates(subset=[data_header.get('goods_supplier')],
                                                         keep='first').loc[:, data_header.get('goods_supplier')].values
                for i in goods_supplier_list:
                    if str(i) == 'nan':
                        i = 'N/A'
                    if supplier.objects.filter(openid=self.request.auth.openid,
                                               supplier_name=str(i).strip()).exists():
                        pass
                    else:
                        supplier.objects.create(openid=self.request.auth.openid,
                                                supplier_name=str(i).strip(),
                                                supplier_city="Supplier City",
                                                supplier_address="Supplier Address",
                                                supplier_contact="Supplier Contact",
                                                supplier_manager="Supplier Manager",
                                                creater=str(staff_name))
                goods_unit_list = df.drop_duplicates(subset=[data_header.get('goods_unit')],
                                                     keep='first').loc[:, data_header.get('goods_unit')].values
                for i in goods_unit_list:
                    if str(i) == 'nan':
                        i = 'N/A'
                    if goodsunit.objects.filter(openid=self.request.auth.openid,
                                                goods_unit=str(i).strip()).exists():
                        pass
                    else:
                        goodsunit.objects.create(openid=self.request.auth.openid,
                                                 goods_unit=str(i).strip(),
                                                 creater=str(staff_name))
                goods_class_list = df.drop_duplicates(subset=[data_header.get('goods_class')],
                                                      keep='first').loc[:, data_header.get('goods_class')].values
                for i in goods_class_list:
                    if str(i) == 'nan':
                        i = 'N/A'
                    if goodsclass.objects.filter(openid=self.request.auth.openid,
                                                 goods_class=str(i).strip()).exists():
                        pass
                    else:
                        goodsclass.objects.create(openid=self.request.auth.openid,
                                                  goods_class=str(i).strip(),
                                                  creater=str(staff_name))
                goods_brand_list = df.drop_duplicates(subset=[data_header.get('goods_brand')],
                                                      keep='first').loc[:, data_header.get('goods_brand')].values
                for i in goods_brand_list:
                    if str(i) == 'nan':
                        i = 'N/A'
                    if goodsbrand.objects.filter(openid=self.request.auth.openid,
                                                 goods_brand=str(i).strip()).exists():
                        pass
                    else:
                        goodsbrand.objects.create(openid=self.request.auth.openid,
                                                  goods_brand=str(i).strip(),
                                                  creater=str(staff_name))
                goods_color_list = df.drop_duplicates(subset=[data_header.get('goods_color')],
                                                      keep='first').loc[:, data_header.get('goods_color')].values
                for i in goods_color_list:
                    if str(i) == 'nan':
                        i = 'N/A'
                    if goodscolor.objects.filter(openid=self.request.auth.openid,
                                                 goods_color=str(i).strip()).exists():
                        pass
                    else:
                        goodscolor.objects.create(openid=self.request.auth.openid,
                                                  goods_color=str(i).strip(),
                                                  creater=str(staff_name))
                goods_shape_list = df.drop_duplicates(subset=[data_header.get('goods_shape')],
                                                      keep='first').loc[:, data_header.get('goods_shape')].values
                for i in goods_shape_list:
                    if str(i) == 'nan':
                        i = 'N/A'
                    if goodsshape.objects.filter(openid=self.request.auth.openid,
                                                 goods_shape=str(i).strip()).exists():
                        pass
                    else:
                        goodsshape.objects.create(openid=self.request.auth.openid,
                                                  goods_shape=str(i).strip(),
                                                  creater=str(staff_name))
                goods_specs_list = df.drop_duplicates(subset=[data_header.get('goods_specs')],
                                                      keep='first').loc[:, data_header.get('goods_specs')].values
                for i in goods_specs_list:
                    if str(i) == 'nan':
                        i = 'N/A'
                    if goodsspecs.objects.filter(openid=self.request.auth.openid,
                                                 goods_specs=str(i).strip()).exists():
                        pass
                    else:
                        goodsspecs.objects.create(openid=self.request.auth.openid,
                                                  goods_specs=str(i).strip(),
                                                  creater=str(staff_name))
                goods_origin_list = df.drop_duplicates(subset=[data_header.get('goods_origin')],
                                                       keep='first').loc[:, data_header.get('goods_origin')].values
                for i in goods_origin_list:
                    if str(i) == 'nan':
                        i = 'N/A'
                    if goodsorigin.objects.filter(openid=self.request.auth.openid,
                                                  goods_origin=str(i).strip()).exists():
                        pass
                    else:
                        goodsorigin.objects.create(openid=self.request.auth.openid,
                                                   goods_origin=str(i).strip(),
                                                   creater=str(staff_name))
            else:
                raise APIException({"detail": "Can Not Support This File Type"})
        else:
            raise APIException({"detail": "Please Select One File"})
        return Response({"detail": "success"})


class SupplierfileAddViewSet(views.APIView):
    """
    create:
        Upload One Excel(post)
    """
    pagination_class = []

    def get_queryset(self):
        if self.request.user:
            return supplier.objects.filter(openid=self.request.auth.openid)
        else:
            return supplier.objects.filter().none()

    def get_lang(self):
        if self.request.user:
            lang = self.request.META.get('HTTP_LANGUAGE')
        else:
            lang = 'en-US'
        if lang == 'zh-hans':
            data_header = supplierfiles.cn_data_header()
        elif lang == 'en-US':
            data_header = supplierfiles.en_data_header()
        else:
            data_header = supplierfiles.en_data_header()
        return data_header
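
    # Same column layout as SupplierfileViewSet above; existing data is kept and
    # a row is only created when no identical supplier record already exists.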
    def post(self, request, *args, **kwargs):
        data_header = self.get_lang()
        files = self.request.FILES.get('file')
        if files:
            excel_type = files.name.split('.')[-1]  # last suffix, so filenames containing dots are handled
            staff_name = staff.objects.filter(openid=self.request.auth.openid,
                                              id=self.request.META.get('HTTP_OPERATOR')).first().staff_name
            if excel_type in ['xlsx', 'xls', 'csv']:
                if excel_type == 'csv':
                    df = pd.read_csv(files)
                else:
                    df = pd.read_excel(files)
                df.drop_duplicates(keep='first', inplace=True)
                data_list = df.drop_duplicates(subset=[data_header.get('supplier_name')], keep='first').values
                for d in range(len(data_list)):
                    data_validate(str(data_list[d]))
                for i in range(len(data_list)):
                    if str(data_list[i][0]) == 'nan':
                        continue
                    else:
                        if str(data_list[i][1]) == 'nan':
                            data_list[i][1] = 'N/A'
                        if str(data_list[i][2]) == 'nan':
                            data_list[i][2] = 'N/A'
                        if is_number(str(data_list[i][3])):
                            if str(data_list[i][3]) == 'nan':
                                data_list[i][3] = 0
                        else:
                            data_list[i][3] = 0
                        if str(data_list[i][4]) == 'nan':
                            data_list[i][4] = 'N/A'
                        if is_number(str(data_list[i][5])):
                            if str(data_list[i][5]) == 'nan':
                                data_list[i][5] = 0
                        else:
                            data_list[i][5] = 0
                        if supplier.objects.filter(openid=self.request.auth.openid,
                                                   supplier_name=str(data_list[i][0]).strip(),
                                                   supplier_city=str(data_list[i][1]).strip(),
                                                   supplier_address=str(data_list[i][2]).strip(),
                                                   supplier_contact=data_list[i][3],
                                                   supplier_manager=str(data_list[i][4]).strip(),
                                                   supplier_level=data_list[i][5],
                                                   creater=str(staff_name)).exists():
                            pass
                        else:
                            supplier.objects.create(openid=self.request.auth.openid,
                                                    supplier_name=str(data_list[i][0]).strip(),
                                                    supplier_city=str(data_list[i][1]).strip(),
                                                    supplier_address=str(data_list[i][2]).strip(),
                                                    supplier_contact=data_list[i][3],
                                                    supplier_manager=str(data_list[i][4]).strip(),
                                                    supplier_level=data_list[i][5],
                                                    creater=str(staff_name))
            else:
                raise APIException({"detail": "Can Not Support This File Type"})
        else:
            raise APIException({"detail": "Please Select One File"})
        return Response({"detail": "success"})


class CustomerfileAddViewSet(views.APIView):
    """
    create:
        Upload One Excel(post)
    """
    pagination_class = []

    def get_queryset(self):
        if self.request.user:
            return customer.objects.filter(openid=self.request.auth.openid)
        else:
            return customer.objects.filter().none()

    def get_lang(self):
        if self.request.user:
            lang = self.request.META.get('HTTP_LANGUAGE')
        else:
            lang = 'en-US'
        if lang == 'zh-hans':
            data_header = customerfiles.cn_data_header()
        elif lang == 'en-US':
            data_header = customerfiles.en_data_header()
        else:
            data_header = customerfiles.en_data_header()
        return data_header
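
    # Same column layout as CustomerfileViewSet above; existing data is kept and
    # a row is only created when no identical customer record already exists.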
    def post(self, request, *args, **kwargs):
        data_header = self.get_lang()
        files = self.request.FILES.get('file')
        if files:
            excel_type = files.name.split('.')[-1]  # last suffix, so filenames containing dots are handled
            staff_name = staff.objects.filter(openid=self.request.auth.openid,
                                              id=self.request.META.get('HTTP_OPERATOR')).first().staff_name
            if excel_type in ['xlsx', 'xls', 'csv']:
                if excel_type == 'csv':
                    df = pd.read_csv(files)
                else:
                    df = pd.read_excel(files)
                df.drop_duplicates(keep='first', inplace=True)
                data_list = df.drop_duplicates(subset=[data_header.get('customer_name')], keep='first').values
                for d in range(len(data_list)):
                    data_validate(str(data_list[d]))
                for i in range(len(data_list)):
                    if str(data_list[i][0]) == 'nan':
                        continue
                    else:
                        if str(data_list[i][1]) == 'nan':
                            data_list[i][1] = 'N/A'
                        if str(data_list[i][2]) == 'nan':
                            data_list[i][2] = 'N/A'
                        if is_number(str(data_list[i][3])):
                            if str(data_list[i][3]) == 'nan':
                                data_list[i][3] = 0
                        else:
                            data_list[i][3] = 0
                        if str(data_list[i][4]) == 'nan':
                            data_list[i][4] = 'N/A'
                        if is_number(str(data_list[i][5])):
                            if str(data_list[i][5]) == 'nan':
                                data_list[i][5] = 0
                        else:
                            data_list[i][5] = 0
                        if customer.objects.filter(openid=self.request.auth.openid,
                                                   customer_name=str(data_list[i][0]).strip(),
                                                   customer_city=str(data_list[i][1]).strip(),
                                                   customer_address=str(data_list[i][2]).strip(),
                                                   customer_contact=data_list[i][3],
                                                   customer_manager=str(data_list[i][4]).strip(),
                                                   customer_level=data_list[i][5]).exists():
                            pass
                        else:
                            customer.objects.create(openid=self.request.auth.openid,
                                                    customer_name=str(data_list[i][0]).strip(),
                                                    customer_city=str(data_list[i][1]).strip(),
                                                    customer_address=str(data_list[i][2]).strip(),
                                                    customer_contact=data_list[i][3],
                                                    customer_manager=str(data_list[i][4]).strip(),
                                                    customer_level=data_list[i][5],
                                                    creater=str(staff_name))
            else:
                raise APIException({"detail": "Can Not Support This File Type"})
        else:
            raise APIException({"detail": "Please Select One File"})
        return Response({"detail": "success"})


class CapitalfileAddViewSet(views.APIView):
    """
    create:
        Upload One Excel(post)
    """
    pagination_class = []

    def get_queryset(self):
        if self.request.user:
            return capital.objects.filter(openid=self.request.auth.openid)
        else:
            return capital.objects.filter().none()
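
    # Same column layout as CapitalfileViewSet above; existing data is kept and
    # a row is only created when no identical capital record already exists.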
    def post(self, request, *args, **kwargs):
        files = self.request.FILES.get('file')
        if files:
            excel_type = files.name.split('.')[-1]  # last suffix, so filenames containing dots are handled
            staff_name = staff.objects.filter(openid=self.request.auth.openid,
                                              id=self.request.META.get('HTTP_OPERATOR')).first().staff_name
            if excel_type in ['xlsx', 'xls', 'csv']:
                if excel_type == 'csv':
                    df = pd.read_csv(files)
                else:
                    df = pd.read_excel(files)
                # drop_duplicates(inplace=True) returns None, so take the values from df afterwards
                df.drop_duplicates(keep='first', inplace=True)
                data_list = df.values
                for d in range(len(data_list)):
                    data_validate(str(data_list[d]))
                for i in range(len(data_list)):
                    if str(data_list[i][0]) == 'nan':
                        continue
                    else:
                        if is_number(str(data_list[i][1])):
                            if str(data_list[i][1]) == 'nan':
                                data_list[i][1] = 0
                        else:
                            data_list[i][1] = 0
                        if is_number(str(data_list[i][2])):
                            if str(data_list[i][2]) == 'nan':
                                data_list[i][2] = 0
                        else:
                            data_list[i][2] = 0
                        if capital.objects.filter(openid=self.request.auth.openid,
                                                  capital_name=str(data_list[i][0]).strip(),
                                                  capital_qty=data_list[i][1],
                                                  capital_cost=data_list[i][2]).exists():
                            pass
                        else:
                            capital.objects.create(openid=self.request.auth.openid,
                                                   capital_name=str(data_list[i][0]).strip(),
                                                   capital_qty=data_list[i][1],
                                                   capital_cost=data_list[i][2],
                                                   creater=str(staff_name))
            else:
                raise APIException({"detail": "Can Not Support This File Type"})
        else:
            raise APIException({"detail": "Please Select One File"})
        return Response({"detail": "success"})


class FreightfileAddViewSet(views.APIView):
    """
    create:
        Upload One Excel(post)
    """
    pagination_class = []

    def get_queryset(self):
        if self.request.user:
            return freight.objects.filter(openid=self.request.auth.openid)
        else:
            return freight.objects.filter().none()
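
    # Same column layout as FreightfileViewSet above; existing data is kept and
    # a row is only created when no identical freight record already exists.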
    def post(self, request, *args, **kwargs):
        files = self.request.FILES.get('file')
        if files:
            excel_type = files.name.split('.')[-1]  # last suffix, so filenames containing dots are handled
            staff_name = staff.objects.filter(openid=self.request.auth.openid,
                                              id=self.request.META.get('HTTP_OPERATOR')).first().staff_name
            if excel_type in ['xlsx', 'xls', 'csv']:
                if excel_type == 'csv':
                    df = pd.read_csv(files)
                else:
                    df = pd.read_excel(files)
                # drop_duplicates(inplace=True) returns None, so take the values from df afterwards
                df.drop_duplicates(keep='first', inplace=True)
                data_list = df.values
                for d in range(len(data_list)):
                    data_validate(str(data_list[d]))
                for i in range(len(data_list)):
                    if str(data_list[i][0]) == 'nan':
                        data_list[i][0] = 'N/A'
                    if str(data_list[i][1]) == 'nan':
                        data_list[i][1] = 'N/A'
                    if is_number(str(data_list[i][2])):
                        if str(data_list[i][2]) == 'nan':
                            data_list[i][2] = 0
                    else:
                        data_list[i][2] = 0
                    if is_number(str(data_list[i][3])):
                        if str(data_list[i][3]) == 'nan':
                            data_list[i][3] = 0
                    else:
                        data_list[i][3] = 0
                    if is_number(str(data_list[i][4])):
                        if str(data_list[i][4]) == 'nan':
                            data_list[i][4] = 0
                    else:
                        data_list[i][4] = 0
                    if str(data_list[i][5]) == 'nan':
                        data_list[i][5] = 'N/A'
                    if freight.objects.filter(openid=self.request.auth.openid,
                                              send_city=str(data_list[i][0]).strip(),
                                              receiver_city=str(data_list[i][1]).strip(),
                                              weight_fee=data_list[i][2],
                                              volume_fee=data_list[i][3],
                                              min_payment=data_list[i][4],
                                              transportation_supplier=str(data_list[i][5]).strip()).exists():
                        pass
                    else:
                        freight.objects.create(openid=self.request.auth.openid,
                                               send_city=str(data_list[i][0]).strip(),
                                               receiver_city=str(data_list[i][1]).strip(),
                                               weight_fee=data_list[i][2],
                                               volume_fee=data_list[i][3],
                                               min_payment=data_list[i][4],
                                               transportation_supplier=str(data_list[i][5]).strip(),
                                               creater=str(staff_name))
            else:
                raise APIException({"detail": "Can Not Support This File Type"})
        else:
            raise APIException({"detail": "Please Select One File"})
        return Response({"detail": "success"})