File packages/shipment_finish.py changed (mode: 100644) (index c247c2f..3b1da50) |

@@ -15,8 +15,8 @@
 line = "#"*70
 altline = "-"*70
 
 # Define a string containing the day+month of the current date
-todatstr = datetime.datetime.now().strftime("%d-%m")
+todaystr = datetime.datetime.now().strftime("%d-%m")
 
 def finishShipment(picklist, folder):
     # keys: barcode, packages : { package : {qty, location, packet} }
|
@@ -72,7 +72,8 @@ def finishShipment(picklist, folder):
         packages = {}
 
         for index, row in enumerate( shipped_items[ sku ] ):
-            packages[int( index )] = {'quantity':row['Versand'],
+            packages[int( index )] = {'sku' : sku,
+                                      'quantity':row['Versand'],
                                       'packet':row['Kiste'],
                                       'location':row['location']}
 
|
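The change above stores the SKU inside every package entry, so each entry in the picklist dictionary that finishShipment builds is self-describing. A minimal sketch of the resulting shape, with invented values (the real rows come from the 'Versand' and 'Kiste' columns of the shipped items):

    # Hypothetical example of the per-SKU structure after this change;
    # all field values are made up for illustration.
    Data = {
        'SKU-123': {
            'barcode': 'X00ABC1234',
            'packages': {
                0: {'sku': 'SKU-123', 'quantity': '10', 'packet': '1', 'location': 'A-01-02'},
                1: {'sku': 'SKU-123', 'quantity': '5', 'packet': '2', 'location': 'B-03-01'},
            },
        },
    }

Carrying the SKU in each package entry lets the new total_quantity_per_package helper further down key its aggregates by sku + packet without needing the outer dictionary key.
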
@@ -87,12 +88,18 @@ def finishShipment(picklist, folder):
 
     # Check the values of Data TEST
     #for row in Data:
+    #    last_sku = ''
     #    print("SKU: {0}, Barcode: {1}\n".format( row, Data[ row ]['barcode'] ))
     #    for package in Data[ row ]['packages']:
-    #        print("QTY: {0}, PACKET: {1}, from LOCATION: {2}\n"
+    #        if(Data[row]['packages'][ package ]['sku'] == last_sku):
+    #            print("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!")
+    #        if(not(last_sku)):
+    #            last_sku = Data[row]['packages'][ package ]['sku']
+    #        print("SKU: {3}, QTY: {0}, PACKET: {1}, from LOCATION: {2}\n"
     #              .format(Data[row]['packages'][ package ]['quantity'],
     #                      Data[row]['packages'][ package ]['packet'],
-    #                      Data[row]['packages'][ package ]['location']))
+    #                      Data[row]['packages'][ package ]['location'],
+    #                      Data[row]['packages'][ package ]['sku']))
 
     #Filling out a inbound plan from amazon
     createInboundPlan(dataset=Data, folder=folder)
|
@@ -170,10 +177,10 @@ def createInboundPlan(dataset, folder):
 
     if(os.path.isfile(inboundplan['filepath'])):
         print("{0}\n".format(line))
-        print("File was created at\n{0}.\n".format(folder.replace("Report", "Upload"))
+        print("File was created at\n{0}.\n".format(folder.replace("Report", "Upload")))
         print("Please make sure to fill out all delivery data.\n")
         print("{0}\n".format(altline))
-        print("Datei wurde erstellt in\n{0}.\n".format(folder.replace("Report", "Upload"))
+        print("Datei wurde erstellt in\n{0}.\n".format(folder.replace("Report", "Upload")))
         print("Bitte stelle sicher das alle Versandsinformationen eingetragen sind.\n")
     else:
         print("{0}\n".format(line))
|
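The messages above point the user at the Upload directory, which the code derives by replacing "Report" with "Upload" in the report folder path; sortIntoPackageList later uses the same substitution for the packliste file. A small sketch of that convention, with an invented path:

    folder = "C:/fba/Report"                             # invented example path
    upload_folder = folder.replace("Report", "Upload")   # -> "C:/fba/Upload"

This only works while the selected folder path actually contains the string "Report"; any other occurrence of that word in the path would be replaced as well.
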
@@ -193,13 +200,13 @@ def reducePlentyMarketsQty(dataset, folder):
     print("Please get an export of the Plentymarkets locations\n")
     # This part changes when dynamic export get deprecated
     print("Go to Plentymarkets->Data->dynamic export->ID 49(location dataformat)\n")
-    print("choose only positive physical content and click on the cog symbol.\n")
-    print("Save it inside the Report folder.\n")
+    print("choose only positive physical content, the correct barcode type.\n")
+    print("Click on the cog symbol & Save it inside the Report folder.\n")
     print("{0}\n".format(altline))
     print("Bitte exportiere die aktuellen Plentymarkets Lagerplätze\n")
     print("Gehe auf Plentymarkets->Daten->dynamischer Export->ID 49(Lagerplatz Datenformat)\n")
-    print("Wähle nur positven physischen Lagerbestand und klicke auf das Zahnrad Symbol.\n")
-    print("Speichere diesen Export im Report Ordner.\n")
+    print("Wähle nur positven physischen Lagerbestand und den richtigen Barcode Typen.\n")
+    print("Klicke auf das Zahnrad Symbol & Speichere diesen Export im Report Ordner.\n")
     print("{0}\n".format(line))
 
     print("press ENTER to continue...\n")
|
@@ -291,51 +298,111 @@ def sortIntoPackageList(pickdata, folder):
     packlist['filepath'] = tkinter.filedialog.askopenfilename(initialdir=folder,
                                                               title="Package list template from Amazon")
 
-    if(packlist['filepath']):
-        # Get encoding of the file
-        with open(packlist['filepath'], mode='rb') as item:
-            raw_data = item.read()
-            packlist['encoding'] = chardet.detect(raw_data)['encoding']
-
-        # Read the Data from the location list into a Data Dictionary
-        # define the fixed header before adding the variable ones that derive from
-        # the shipment id and the package number
-        column_names = ["Merchant SKU", "Title", "ASIN", "FNSKU",
-                        "external-id", "Condition", "Who Will Prep?",
-                        "Prep Type", "Who Will Label?", "Shipped"]
-
-        # open the picklist data to determine the highest package number
-        highest_pkg_num = 0
-        for row in pickdata:
-            for package in pickdata[row]['packages']:
-                if( pickdata[row]['packages'][package]['packet'] ):
-                    if(int( pickdata[row]['packages'][package]['packet'] ) > int(highest_pkg_num)):
-                        highest_pkg_num = pickdata[row]['packages'][package]['packet']
-
-        # Determine the Shipment ID by opening the file with csv.reader
-        with open(packlist['filepath'], mode='r', encoding=packlist['encoding']) as item:
-            reader = csv.reader(item, delimiter = '\t')
-
-            shipment_id = next(reader)[1]
-
-        # combine the shipment_id with the package number
-        package_headers = []
-        for number in range(int( highest_pkg_num )):
-            for field_type in ['qty', 'date']:
-                package_name = shipment_id + 'U'
-                if(number + 1 < 10):
-                    package_name = package_name + '00' + str( number + 1 )
-                else:
-                    package_name = package_name + '0' + str(number + 1)
-                if(field_type == 'qty'):
-                    package_name = package_name + ' - Unit Quantity'
-                    package_headers.append(package_name)
-                else:
-                    package_name = package_name + ' Expiration Date (dd.mm.yy)'
-                    package_headers.append(package_name)
-
-        for header in package_headers:
-            column_names.append(header)
+    try:
+        if(packlist['filepath']):
+            # Get encoding of the file
+            with open(packlist['filepath'], mode='rb') as item:
+                raw_data = item.read()
+                packlist['encoding'] = chardet.detect(raw_data)['encoding']
+
+            # Read the Data from the location list into a Data Dictionary
+            # define the fixed header before adding the variable ones that derive from
+            # the shipment id and the package number
+            column_names = ["Merchant SKU", "Title", "ASIN", "FNSKU",
+                            "external-id", "Condition", "Who Will Prep?",
+                            "Prep Type", "Who Will Label?", "Shipped"]
+
+            # open the picklist data to determine the highest package number
+            highest_pkg_num = 0
+            for row in pickdata:
+                for package in pickdata[row]['packages']:
+                    if( pickdata[row]['packages'][package]['packet'] ):
+                        if(int( pickdata[row]['packages'][package]['packet'] ) > int(highest_pkg_num)):
+                            highest_pkg_num = pickdata[row]['packages'][package]['packet']
+
+            # Determine the Shipment ID by opening the file with csv.reader
+            with open(packlist['filepath'], mode='r', encoding=packlist['encoding']) as item:
+                reader = csv.reader(item, delimiter = '\t')
+
+                shipment_id = next(reader)[1]
+
+            # combine the shipment_id with the package number
+            package_headers = []
+            for number in range(int( highest_pkg_num )):
+                for field_type in ['qty', 'date']:
+                    package_name = shipment_id + 'U'
+                    if(number + 1 < 10):
+                        package_name = package_name + '00' + str( number + 1 )
+                    else:
+                        package_name = package_name + '0' + str(number + 1)
+                    if(field_type == 'qty'):
+                        package_name = package_name + ' - Unit Quantity'
+                        package_headers.append(package_name)
+                    else:
+                        package_name = package_name + ' Expiration Date (dd.mm.yy)'
+                        package_headers.append(package_name)
+
+            for header in package_headers:
+                column_names.append(header)
+
+            Data = {}
+            total_quantity = total_quantity_per_package(pickdata)
+
+            with open(packlist['filepath'], mode='r', encoding=packlist['encoding']) as item:
+                reader = csv.DictReader(item, delimiter='\t', fieldnames=column_names)
+
+                for row in reader:
+                    value = ''
+                    values = [value for value in range( len(column_names) )]
+                    for index, name in enumerate( column_names ):
+                        values[index] = row[column_names[index]]
+
+                    Data[row['Merchant SKU']] = dict(zip(column_names, values))
 
-        Data = {}
+            for row in Data:
+                if(Data[row]['Merchant SKU'] and Data[row]['FNSKU'] and not( Data[row]['FNSKU'] == 'FNSKU' )):
+                    for pick_row in pickdata:
+                        packages = pickdata[Data[ row ]['Merchant SKU']]['packages']
+                        for package in packages:
+                            try:
+                                if(packages[package]['packet']):
+                                    if(int(packages[package]['packet']) < 10):
+                                        zeroes = '00'
+                                    else:
+                                        zeroes ='0'
+                                else:
+                                    print("{0}\n".format(line))
+                                    print( "{0} doesn't have a packet assigned to it!".format( Data[row]['Merchant SKU'] ) )
+                                    print("Correct the mistake and try again.")
+                                    print("{0}\n".format(altline))
+                                    print("{0} wurde keine Kiste zugewiesen!".format( Data[row]['Merchant SKU'] ))
+                                    print("Fehler beheben und erneut versuchen.")
+                                    print("\npress ENTER to continue...")
+                                    input()
+                                    sys.exit()
+                            except ValueError:
+                                print("SKU: {0}, packet: {1}\n"
+                                      .format(pickdata[Data[row]['Merchant SKU']],
+                                              packages[package]['packet'] ))
+                            if(packages[package]['packet']):
+                                packet_column = shipment_id + 'U' + zeroes + packages[package]['packet'] + ' - Unit Quantity'
+                                Data[row][packet_column] = total_quantity[packages[package]['sku'] + str(packages[package]['packet'])]['quantity']
+            # Write the result to a new file
+            filepath = folder.replace('Report', 'Upload') + '/packliste-' + todaystr + '.tsv'
+
+            with open(filepath, mode='w') as item:
+                writer = csv.DictWriter(item, delimiter='\t', fieldnames=column_names)
+
+                # Not writing the header so that the structure of the amazon format stays intact
+                for row in Data:
+                    writer.writerow(Data[row])
+
+            # Check if the file was created
+            if(os.path.isfile(filepath)):
+                print("The file was succesfully created!\n{0}".format(filepath))
+                input()
+    except KeyError as err:
+        print("ERROR @ packageprocess: {0} {1}".format(err, sys.exc_info()[2].tb_lineno))
+        print("press ENTER to continue...")
+        input()
|
|
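Both the header-generation loop and the per-package assignment above compose the Amazon column name from the shipment ID, the letter 'U', a zero-padded packet number and a fixed suffix. A standalone sketch of that naming scheme, assuming a made-up shipment ID (the real one is read from the second field of the template's first row):

    shipment_id = "FBA15XYZ"   # invented example value
    packet = 3

    # mirrors the '00' / '0' padding above, i.e. three digits in total
    zeroes = '00' if packet < 10 else '0'
    qty_column = shipment_id + 'U' + zeroes + str(packet) + ' - Unit Quantity'
    date_column = shipment_id + 'U' + zeroes + str(packet) + ' Expiration Date (dd.mm.yy)'
    # qty_column  -> "FBA15XYZU003 - Unit Quantity"
    # date_column -> "FBA15XYZU003 Expiration Date (dd.mm.yy)"

The same padding could be expressed as str(packet).zfill(3), which would also keep working for more than 99 packets.
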
@@ -342,51 +409,36 @@ def sortIntoPackageList(pickdata, folder):
-        with open(packlist['filepath'], mode='r', encoding=packlist['encoding']) as item:
-            reader = csv.DictReader(item, delimiter='\t', fieldnames=column_names)
 
-            for row in reader:
-                value = ''
-                values = [value for value in range( len(column_names) )]
-                for index, name in enumerate( column_names ):
-                    values[index] = row[column_names[index]]
+def total_quantity_per_package(dataset):
 
-                Data[row['Merchant SKU']] = dict(zip(column_names, values))
+    total_qty = {}
+    column_names = ['package', 'sku', 'locations', 'quantity']
 
-        for row in Data:
-            if(Data[row]['Merchant SKU'] and Data[row]['FNSKU'] and not( Data[row]['FNSKU'] == 'FNSKU' )):
-                for pick_row in pickdata:
-                    for package in pickdata[Data[ row ]['Merchant SKU']]['packages']:
-                        try:
-                            if(pickdata[Data[ row ]['Merchant SKU']]['packages'][package]['packet']):
-                                if(int(pickdata[Data[ row ]['Merchant SKU']]['packages'][package]['packet']) < 10):
-                                    zeroes = '00'
-                                else:
-                                    zeroes ='0'
-                            else:
-                                print("{0}\n".format(line))
-                                print( "{0} doesn't have a packet assigned to it!".format( Data[row]['Merchant SKU'] ) )
-                                print("Correct the mistake and try again.")
-                                print("{0}\n".format(altline))
-                                print("{0} wurde keine Kiste zugewiesen!".format( Data[row]['Merchant SKU'] ))
-                                print("Fehler beheben und erneut versuchen.")
-                                print("\npress ENTER to continue...")
-                                input()
-                                sys.exit()
-                        except ValueError:
-                            print("SKU: {0}, packet: {1}\n"
-                                  .format(pickdata[Data[row]['Merchant SKU']],
-                                          pickdata[Data[row]['Merchant SKU']]['packages'][package]['packet'] ))
-                        if(pickdata[Data[ row ]['Merchant SKU']]['packages'][package]['packet']):
-                            packet_column = shipment_id + 'U' + zeroes + pickdata[Data[ row ]['Merchant SKU']]['packages'][package]['packet'] + ' - Unit Quantity'
-                            Data[row][packet_column] = pickdata[Data[row]['Merchant SKU']]['packages'][package]['quantity']
-        # Write the result to a new file
-        filepath = folder.replace('Report', 'Upload') + '/packliste-' + todatstr + '.tsv'
-
-        with open(filepath, mode='w') as item:
-            writer = csv.DictWriter(item, delimiter='\t', fieldnames=column_names)
-
-            # Not writing the header so that the structure of the amazon format stays intact
-            for row in Data:
-                writer.writerow(Data[row])
+    try:
+        for row in dataset:
+            packages = dataset[row]['packages']
+            for package in packages:
+                # When there is no entry with the sku in total_qty create a new entry
+                if(not(packages[package]['sku'] + str( packages[package]['packet'] ) in [*total_qty])):
+                    values = [packages[package]['packet'], packages[package]['sku'],
+                              [packages[package]['location']], int( packages[package]['quantity'] )]
+                    total_qty[packages[package]['sku'] + str( packages[package]['packet'] )] = dict(zip(column_names, values))
+                # Otherwise if Sku is in total_qty attach the location to the list and add the quantity
+                else:
+                    # if the location is not already in the list
+                    if(not( packages[package]['location'] in total_qty[packages[package]['sku'] + str(packages[package]['packet'])]['locations'] )):
+                        total_qty[packages[package]['sku'] + str(packages[package]['packet'])]['quantity'] += int( packages[package]['quantity'] )
+                        total_qty[packages[package]['sku'] + str(packages[package]['packet'])]['locations'].append(packages[package]['location'])
+                    # in case that the location is in the list there has to be a mistake
+                    # because this means that there are two entries for the same sku on the same location
+                    # in the picklist
+                    else:
+                        print("ERROR @ total_qty: sku: {0}, location: {1}".format(packages[package]['sku']))
+                        print("error: There are 2 entries for the exact same sku and location in the picklist")
+                        print("press Enter to continue...")
+                        input()
+    except KeyError as err:
+        print("ERROR @ total_qty (KEYERROR): line:{0}, err:{1}".
+              format(sys.exc_info()[2].tb_lineno, err))
+        print("press ENTER to continue..")
+        input()
 
-        # Check if the file was created
-        if(os.path.isfile(filepath)):
-            print("The file was succesfully created!\n{0}".format(filepath))
+    return total_qty
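
The new helper keys its aggregate on the concatenation of SKU and packet number, sums the quantity for that pair across picking locations, and flags a repeated SKU/location pair as a picklist error. A minimal, self-contained sketch of the same aggregation idea with invented data (the real function additionally rejects duplicate locations and guards against KeyError):

    # Invented picklist excerpt: SKU-1 goes into packet 1 from two locations.
    pickdata = {
        'SKU-1': {'packages': {
            0: {'sku': 'SKU-1', 'packet': '1', 'location': 'A-01', 'quantity': '4'},
            1: {'sku': 'SKU-1', 'packet': '1', 'location': 'B-02', 'quantity': '6'},
        }},
    }

    total_qty = {}
    for sku in pickdata:
        for pkg in pickdata[sku]['packages'].values():
            key = pkg['sku'] + str(pkg['packet'])     # e.g. "SKU-11"
            entry = total_qty.setdefault(key, {'package': pkg['packet'], 'sku': pkg['sku'],
                                               'locations': [], 'quantity': 0})
            entry['quantity'] += int(pkg['quantity'])
            entry['locations'].append(pkg['location'])

    print(total_qty['SKU-11']['quantity'])   # -> 10

Because the key is a plain string concatenation, a SKU that itself ends in a digit could collide with another SKU/packet combination; a tuple key such as (sku, packet) would avoid that.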