forked from drndos/openspoolman
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathtools_3mf.py
More file actions
295 lines (249 loc) · 11.2 KB
/
tools_3mf.py
File metadata and controls
295 lines (249 loc) · 11.2 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
import requests
import zipfile
import tempfile
import xml.etree.ElementTree as ET
import pycurl
import urllib.parse
import os
import re
import time
import io
from datetime import datetime
from config import PRINTER_CODE, PRINTER_IP
from urllib.parse import urlparse
from logger import log
def parse_ftp_listing(line):
  """Split one line of a Unix-style FTP LIST response into its fields.

  Returns a dict with the nine standard columns, or None when the line
  does not contain all nine (e.g. a summary or blank line).
  """
  fields = line.split(maxsplit=8)
  if len(fields) < 9:
    return None
  permissions, links, owner, group, size, month, day, time_or_year, name = fields
  return {
    'permissions': permissions,
    'links': int(links),
    'owner': owner,
    'group': group,
    'size': int(size),
    'month': month,
    'day': int(day),
    'time_or_year': time_or_year,
    'name': name
  }
def get_base_name(filename):
  """Return *filename* with its final extension removed (no-op if none)."""
  base, sep, _ext = filename.rpartition('.')
  return base if sep else filename
def parse_date(item):
  """Parse the timestamp fields of a parsed FTP listing entry.

  Unix FTP listings show "HH:MM" for recent entries but the year for
  entries older than about six months; the original code only handled the
  first form. Both are parsed here.

  Args:
    item (dict): entry from parse_ftp_listing (uses 'month', 'day',
      'time_or_year').

  Returns:
    datetime | None: the parsed timestamp (year defaults to 1900 for the
      HH:MM form, per strptime), or None if neither format matches.
  """
  date_str = f"{item['month']} {item['day']} {item['time_or_year']}"
  for fmt in ("%b %d %H:%M", "%b %d %Y"):
    try:
      return datetime.strptime(date_str, fmt)
    except ValueError:
      continue
  return None
def get_filament_order(file):
  """Scan a binary G-code stream for M620 commands and record, for each
  filament id, the zero-based order in which it is first used.

  Id 255 (the "no filament" sentinel) is ignored. If no filament change is
  found at all, filament 1 is assumed to be first: {1: 0}.
  """
  m620 = re.compile(r"^M620 S(\d+)[^;\r\n]*")
  order = {}
  for raw in file:
    hit = m620.match(raw.decode("utf-8").strip())
    if not hit:
      continue
    tool = int(hit.group(1))
    if tool != 255 and tool not in order:
      order[tool] = len(order)
  return order if order else {1: 0}
def download3mfFromCloud(url, destFile):
  """Download a 3MF file over HTTP(S) and write its bytes into *destFile*.

  Args:
    url (str): HTTP(S) URL of the 3MF file.
    destFile: writable binary file object the payload is written to.

  Raises:
    requests.exceptions.RequestException: on connection failure, timeout,
      or a non-2xx HTTP status (callers already catch this base class).
  """
  log("Downloading 3MF file from cloud...")
  # Download the file and save it to the temporary file.
  # Without a timeout requests.get can block forever on a dead connection;
  # a Timeout is a RequestException subclass, so existing handlers apply.
  response = requests.get(url, timeout=30)
  response.raise_for_status()
  destFile.write(response.content)
def _log_cache_dir_listing(ftp_host, ftp_user, ftp_pass):
  """Best-effort debug aid: log the names in the printer's /cache directory."""
  log("[DEBUG] Listing found printer files in /cache directory")
  buffer = io.BytesIO()
  c = setupPycurlConnection(ftp_user, ftp_pass)
  c.setopt(c.URL, f"ftps://{ftp_host}/cache/")
  c.setopt(c.WRITEDATA, buffer)
  c.setopt(c.DIRLISTONLY, True)
  try:
    c.perform()
    log(f"[DEBUG] Directory Listing: {buffer.getvalue().decode('utf-8').splitlines()}")
  except pycurl.error:
    # Listing is purely diagnostic; never let it mask the original error.
    log("[ERROR] Could not retrieve directory listing.")
  finally:
    # The handle was previously leaked on both paths.
    c.close()


def download3mfFromFTP(filename, destFile):
  """Download /cache/<filename> from the printer over explicit FTPS.

  The payload is written to destFile.name (destFile itself is reopened by
  name, not written through its handle).

  Args:
    filename (str): bare filename inside the printer's /cache directory.
    destFile: named file object whose .name is used as the local target.

  Returns:
    bool: True on success, False on failure (previously an implicit None).
  """
  log("Downloading 3MF file from FTP...")
  ftp_host = PRINTER_IP
  ftp_user = "bblp"
  ftp_pass = PRINTER_CODE
  remote_path = "/cache/" + filename
  local_path = destFile.name
  encoded_remote_path = urllib.parse.quote(remote_path)
  url = f"ftps://{ftp_host}{encoded_remote_path}"
  log(f"[DEBUG] Attempting file download of: {remote_path}")
  # Retry loop: the printer may still be writing the file into /cache when
  # we first ask for it, which surfaces as cURL error 78 (file not found).
  max_retries = 3
  for attempt in range(1, max_retries + 1):
    with open(local_path, "wb") as f:
      c = setupPycurlConnection(ftp_user, ftp_pass)
      try:
        c.setopt(c.URL, url)
        c.setopt(c.WRITEDATA, f)
        log(f"[DEBUG] Attempt {attempt}: Starting download of {remote_path}...")
        c.perform()
        log("[DEBUG] File successfully downloaded!")
        c.close()
        return True  # Exit function on success
      except pycurl.error as e:
        err_code = e.args[0]
        c.close()
        if err_code == 78:  # File Not Found
          if attempt < max_retries:
            # Message previously said "1s" while sleeping 2s.
            log("[WARNING] File not found. Printer might still be writing. Retrying in 2s...")
            time.sleep(2)
            continue
          log("[ERROR] File not found after max retries.")
          _log_cache_dir_listing(ftp_host, ftp_user, ftp_pass)
        # /cache access denied usually means external storage is not set up.
        if err_code == 9:  # Server denied you to change to the given directory
          log("[DEBUG] Printer denied access to /cache path. Ensure external storage is setup to store print files in printer settings.")
          break
        else:
          log(f"[ERROR] Fatal cURL error {err_code}: {e}")
          break  # Don't retry for non-78 File Not Found errors
  return False
def setupPycurlConnection(ftp_user, ftp_pass):
  """Build a pycurl handle preconfigured for the printer's FTPS endpoint.

  Returns a fresh pycurl.Curl with credentials set, certificate checks
  disabled (the printer presents a self-signed certificate), and TLS
  required for the whole FTP session.
  """
  handle = pycurl.Curl()
  handle.setopt(handle.USERPWD, f"{ftp_user}:{ftp_pass}")
  # Certificate verification is off: the printer's cert is self-signed.
  handle.setopt(handle.SSL_VERIFYPEER, 0)
  handle.setopt(handle.SSL_VERIFYHOST, 0)
  # Require TLS on control and data channels, negotiated via AUTH TLS.
  handle.setopt(handle.FTP_SSL, handle.FTPSSL_ALL)
  handle.setopt(handle.FTPSSLAUTH, handle.FTPAUTH_TLS)
  return handle
def download3mfFromLocalFilesystem(path, destFile):
  """Copy the 3MF file at *path* into the open binary file object *destFile*.

  Streams in 1 MiB chunks instead of loading the whole archive into memory
  at once (3MF archives can be large).

  Args:
    path (str): filesystem path of the source 3MF file.
    destFile: writable binary file object.

  Raises:
    OSError: if the source cannot be opened or read.
  """
  with open(path, "rb") as src_file:
    while chunk := src_file.read(1024 * 1024):
      destFile.write(chunk)
def getMetaDataFrom3mf(url):
  """
  Fetch a 3MF file, unzip it, and collect print metadata from
  Metadata/slice_info.config.

  The source is chosen from the URL form: "http(s)://..." downloads from
  the cloud, "local:<path>" copies from the local filesystem, and anything
  else is treated as a printer path whose final segment is fetched from the
  printer's FTP /cache directory.

  Args:
    url (str): URL or pseudo-URL of the 3MF file (see above).

  Returns:
    dict: metadata with keys "model_path", "file", "plateID", "filaments",
      "usage", "image", "gcode_path" and (when the plate G-code exists)
      "filamentOrder"; an empty dict on any failure.
  """
  try:
    metadata = {}
    # Create a temporary file.
    # NOTE: delete_on_close=False (Python 3.12+) lets us close the handle
    # after download and still reopen the file by name before the context
    # exits and deletes it.
    with tempfile.NamedTemporaryFile(delete_on_close=False,delete=True, suffix=".3mf") as temp_file:
      temp_file_name = temp_file.name
      if url.startswith("http"):
        download3mfFromCloud(url, temp_file)
      elif url.startswith("local:"):
        download3mfFromLocalFilesystem(url.replace("local:", ""), temp_file)
      else:
        download3mfFromFTP(url.rpartition('/')[-1], temp_file) # Pull just filename to clear out any unexpected paths
      temp_file.close()
      metadata["model_path"] = url
      parsed_url = urlparse(url)
      metadata["file"] = os.path.basename(parsed_url.path)
      log(f"3MF file downloaded and saved as {temp_file_name}.")
      # Unzip the 3MF file (a 3MF archive is a ZIP container).
      with zipfile.ZipFile(temp_file_name, 'r') as z:
        # Check for the Metadata/slice_info.config file
        slice_info_path = "Metadata/slice_info.config"
        if slice_info_path in z.namelist():
          with z.open(slice_info_path) as slice_info_file:
            # Parse the XML content of the file
            tree = ET.parse(slice_info_file)
            root = tree.getroot()
            # Extract id and used_g from each filament.
            # Sample of the XML being parsed:
            """
            <?xml version="1.0" encoding="UTF-8"?>
            <config>
              <header>
                <header_item key="X-BBL-Client-Type" value="slicer"/>
                <header_item key="X-BBL-Client-Version" value="01.10.01.50"/>
              </header>
              <plate>
                <metadata key="index" value="1"/>
                <metadata key="printer_model_id" value="N2S"/>
                <metadata key="nozzle_diameters" value="0.4"/>
                <metadata key="timelapse_type" value="0"/>
                <metadata key="prediction" value="5450"/>
                <metadata key="weight" value="26.91"/>
                <metadata key="outside" value="false"/>
                <metadata key="support_used" value="false"/>
                <metadata key="label_object_enabled" value="true"/>
                <object identify_id="930" name="FILENAME.3mf" skipped="false" />
                <object identify_id="1030" name="FILENAME.3mf" skipped="false" />
                <object identify_id="1130" name="FILENAME.3mf" skipped="false" />
                <object identify_id="1230" name="FILENAME.3mf" skipped="false" />
                <object identify_id="1330" name="FILENAME.3mf" skipped="false" />
                <object identify_id="1430" name="FILENAME.3mf" skipped="false" />
                <object identify_id="1530" name="FILENAME.3mf" skipped="false" />
                <object identify_id="1630" name="FILENAME.3mf" skipped="false" />
                <object identify_id="1730" name="FILENAME.3mf" skipped="false" />
                <object identify_id="1830" name="FILENAME.3mf" skipped="false" />
                <object identify_id="1930" name="FILENAME.3mf" skipped="false" />
                <object identify_id="2030" name="FILENAME.3mf" skipped="false" />
                <object identify_id="2130" name="FILENAME.3mf" skipped="false" />
                <object identify_id="2230" name="FILENAME.3mf" skipped="false" />
                <filament id="1" tray_info_idx="GFL99" type="PLA" color="#0DFF00" used_m="6.79" used_g="20.26" />
                <filament id="2" tray_info_idx="GFL99" type="PLA" color="#000000" used_m="0.72" used_g="2.15" />
                <filament id="6" tray_info_idx="GFL99" type="PLA" color="#0DFF00" used_m="1.20" used_g="3.58" />
                <filament id="7" tray_info_idx="GFL99" type="PLA" color="#000000" used_m="0.31" used_g="0.92" />
                <warning msg="bed_temperature_too_high_than_filament" level="1" error_code ="1000C001" />
              </plate>
            </config>
            """
            # The plate "index" value is reused below to locate the plate's
            # preview PNG and G-code members inside the archive.
            for meta in root.findall(".//plate/metadata"):
              if meta.attrib.get("key") == "index":
                metadata["plateID"] = meta.attrib.get("value", "")
            usage = {}
            filaments= {}
            filamentId = 1
            for plate in root.findall(".//plate"):
              for filament in plate.findall(".//filament"):
                used_g = filament.attrib.get("used_g")
                #filamentId = int(filament.attrib.get("id"))
                # Filaments are renumbered sequentially from 1; the id
                # attribute in the XML (possibly sparse, e.g. 1,2,6,7) is
                # deliberately ignored — see the commented line above.
                # NOTE(review): assumes downstream code expects contiguous
                # ids — confirm against callers.
                usage[filamentId] = used_g
                filaments[filamentId] = {"id": filamentId,
                                         "tray_info_idx": filament.attrib.get("tray_info_idx"),
                                         "type":filament.attrib.get("type"),
                                         "color": filament.attrib.get("color"),
                                         "used_g": used_g,
                                         "used_m":filament.attrib.get("used_m")}
                filamentId += 1
            metadata["filaments"] = filaments
            metadata["usage"] = usage
        else:
          log(f"File '{slice_info_path}' not found in the archive.")
          return {}
        # Extract the plate preview image into static/prints/, named with a
        # timestamp so repeated prints do not overwrite each other.
        metadata["image"] = time.strftime('%Y%m%d%H%M%S') + ".png"
        with z.open("Metadata/plate_"+metadata["plateID"]+".png") as source_file:
          with open(os.path.join(os.getcwd(), 'static', 'prints', metadata["image"]), 'wb') as target_file:
            target_file.write(source_file.read())
        # Check for the plate's G-code file to recover the filament order
        gcode_path = "Metadata/plate_"+metadata["plateID"]+".gcode"
        metadata["gcode_path"] = gcode_path
        if gcode_path in z.namelist():
          with z.open(gcode_path) as gcode_file:
            metadata["filamentOrder"] = get_filament_order(gcode_file)
    log(metadata)
    return metadata
  except requests.exceptions.RequestException as e:
    log(f"Error downloading file: {e}")
    return {}
  except zipfile.BadZipFile:
    log("The downloaded file is not a valid 3MF archive.")
    return {}
  except ET.ParseError:
    log("Error parsing the XML file.")
    return {}
  except Exception as e:
    log(f"An unexpected error occurred: {e}")
    return {}