List of commits:
Subject | Hash | Author | Date (UTC)
adjusted to another sync format and added a new export format that includes the sale price | d33a1909c4f1ff88569509ad4510a5419e323136 | Basti | 2019-03-22 10:27:22
first Version of the Sync Upload | de9ea87dff9ced84f635dd05d5ae039be870ae8a | Basti | 2019-03-19 16:32:44
First commit including Readme and folder structure | 7e77aa7abd6013ce56d6878c7004973e32011a13 | Basti | 2019-03-19 10:44:36
Commit d33a1909c4f1ff88569509ad4510a5419e323136 - adjusted to another sync format and added a new export format that includes the sale price
Author: Basti
Author date (UTC): 2019-03-22 10:27
Committer name: Basti
Committer date (UTC): 2019-03-22 10:27
Parent(s): de9ea87dff9ced84f635dd05d5ae039be870ae8a
Signing key:
Tree: 8aad3e8df232c654faf1d08d4095f2cbb912f13c
File | Lines added | Lines deleted
packages/__pycache__/syncfile.cpython-37.pyc | 0 | 0
packages/syncfile.py | 41 | 8
File packages/__pycache__/syncfile.cpython-37.pyc changed (mode: 100644) (index 1c0cd29..c17b0d6)
File packages/syncfile.py changed (mode: 100644) (index 60ebb73..58cac2c)
... ...  def writeNewCsv(dataset, header, name):
 18  18          writer = csv.DictWriter(item, delimiter=";", fieldnames=header)
 19  19          writer.writeheader()
 20  20          for row in dataset:
 21     -            if(dataset[row]['OrderItem.itemVariationId']):
     21 +            if(dataset[row]['RowVariationID']):
 22  22                  writer.writerow(dataset[row])
 23  23
 24  24      if(isfile(output_path)):
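
For reference, the filter this hunk changes can be read as the following minimal sketch (the helper name and the write-mode open are assumptions, not part of the commit; dataset is taken to be a dict of row-dicts keyed by SKU, as the rest of the diff suggests). Only rows that already received a 'RowVariationID' from the export file get written:

    import csv

    def write_rows_with_variation_id(dataset, header, out_path):
        # Hypothetical helper mirroring the writeNewCsv filter above:
        # skip every row whose 'RowVariationID' is still empty.
        with open(out_path, mode='w', newline='') as item:
            writer = csv.DictWriter(item, delimiter=";", fieldnames=header)
            writer.writeheader()
            for sku in dataset:
                if dataset[sku]['RowVariationID']:
                    writer.writerow(dataset[sku])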
 
... ...  def writeNewCsv(dataset, header, name):
 29  29
 30  30  def createSyncFile(export, report, orderid):
 31  31      # Define the headers of the new Sync File
 32     -    column_names = ['OrderItem.itemVariationId','OrderItem.quantity'
 33     -                    , 'OrderItem.warehouseId', 'Order.id', 'Order.statusId']
     32 +    column_names = ['CustomerID','FirstName','LastName','OrderID'
     33 +                    ,'OrderType','StoreID','ID_der_Adresse'
     34 +                    ,'RowVariationID','RowOrderID','Name'
     35 +                    ,'Menge','Währung','Typ','Position','Preis']
     36 +
     37 +    # initialize empty strings which get assigned as soon as the
     38 +    # location is scraped from the report
     39 +    first_name = ''
     40 +    last_name = ''
     41 +    customer_id = ''
     42 +    adress_id = ''
     43 +
     44 +    # Here are the predefined names and ids from the plentymarket system
     45 +    first_names = ['KTW1', 'DTM2']
     46 +    last_names = ['c/o Amazon FC KTW1', '']
     47 +    customer_ids = ['1357', '']
     48 +    adress_ids = ['2517', '']
 34  49
 35  50      # initialize a dictionary to save the necessary data
 36  51      Data = {}
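
The commit keeps the predefined Plentymarkets contact data in four parallel lists indexed by fulfillment center. An equivalent way to read that data is a single lookup table keyed by the Amazon fulfillment-center-id; the sketch below only restructures the values shown in the hunk above and is not part of the committed code:

    # Illustrative restructuring of the parallel lists above into one dict,
    # keyed by the fulfillment-center-id found in the Amazon report file.
    FULFILLMENT_CONTACTS = {
        'KTW1': {'first_name': 'KTW1', 'last_name': 'c/o Amazon FC KTW1',
                 'customer_id': '1357', 'adress_id': '2517'},
        'DTM2': {'first_name': 'DTM2', 'last_name': '',
                 'customer_id': '', 'adress_id': ''},
    }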
 
... ...  def createSyncFile(export, report, orderid):
 39  54      with open(report, mode='r') as item:
 40  55          reader = csv.DictReader(item, delimiter="\t")
 41  56
 42     -        for row in reader:
     57 +        for position, row in enumerate( reader ):
     58 +            # Check the fullfillment-center-id position of the report file
     59 +            # Then assign the correct contact to the first_name, last_name,
     60 +            # customer_id, adress_id variables
     61 +            if(row['fulfillment-center-id'] == 'KTW1'):
     62 +                first_name = first_names[0]
     63 +                last_name = last_names[0]
     64 +                customer_id = customer_ids[0]
     65 +                adress_id = adress_ids[0]
     66 +            if(row['fulfillment-center-id'] == 'DTM2'):
     67 +                first_name = first_names[1]
     68 +                last_name = last_names[1]
     69 +                customer_id = customer_ids[1]
     70 +                adress_id = adress_ids[1]
     71 +
 43  72              # define the values for each column in this case the
 44  73              # magic numbers are 104 (the id of our storage facility)
 45  74              # and 3.1 which is the ID of the special FBA Status on Plenty
 46     -            values = ['', row['quantity'], '104', orderid, '3.1']
     75 +            values = [customer_id, first_name, last_name, orderid
     76 +                      , '1', '41560', adress_id
     77 +                      , '', '', row['sku']
     78 +                      , row['quantity'], 'EUR', 0, position, '']
 47  79
 48  80              # combine the values with the header names and name each dict key
 49  81              # after the SKU, that way the sku can be used to get the right
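
The hunk ends just before the combine step it describes. Judging by the comment, each values list is paired with column_names and stored under the row's SKU; the line below is a sketch of that assumed shape (inside the report loop), not the literal committed code:

    # Assumed shape of the combine step described in the comment above:
    # pair each value with its column name and key the entry by SKU.
    Data[row['sku']] = dict(zip(column_names, values))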
 
... ...  def createSyncFile(export, report, orderid):
 56  88
 57  89          for row in reader:
 58  90              # Check if the row contains one of the SKU from the list
 59     -            if(row['VariationNumber'] in [*Data] and row['VariationID']):
 60     -                Data[row['VariationNumber']]['OrderItem.itemVariationId'] = row['VariationID']
 61     -
     91 +            if(row['Variation.number'] in [*Data] and row['Variation.id']):
     92 +                Data[row['Variation.number']]['RowVariationID'] = row['Variation.id']
     93 +                Data[row['Variation.number']]['Name'] = row['Variation.number']
     94 +                Data[row['Variation.number']]['Preis'] = row['VariationSalesPrice.price']
 62  95      # Write the finished Data dictionary into a new file
 63  96
 64  97      writeNewCsv(dataset=Data, header=column_names, name="Sync")
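
A usage sketch for the changed function (the file names and the order id below are placeholders, and the import path simply assumes the packages folder is importable from the repository root). The result is a semicolon-delimited "Sync" CSV whose rows now carry the sale price taken from the Plentymarkets export:

    from packages.syncfile import createSyncFile

    # Hypothetical inputs: a tab-separated Amazon FBA report and a
    # Plentymarkets variation export containing VariationSalesPrice.price.
    createSyncFile(export='plenty_export.csv',
                   report='amazon_fba_report.txt',
                   orderid='12345')
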
Hints:
Before first commit, do not forget to setup your git environment:
git config --global user.name "your_name_here"
git config --global user.email "your@email_here"

Clone this repository using HTTP(S):
git clone https://rocketgit.com/user/initBasti/Complete_Order_Plenty_Update

Clone this repository using ssh (do not forget to upload a key first):
git clone ssh://rocketgit@ssh.rocketgit.com/user/initBasti/Complete_Order_Plenty_Update

Clone this repository using git:
git clone git://git.rocketgit.com/user/initBasti/Complete_Order_Plenty_Update

You are allowed to anonymously push to this repository.
This means that your pushed commits will automatically be transformed into a merge request:
... clone the repository ...
... make some changes and some commits ...
git push origin main