# dataset_builder.py

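"""Dataset builders used to export ParaView scenes as static, web-friendly
datasets: plain images, probed binary arrays, float-image layers, sorted
composites and surface geometry, all described through a shared DataHandler."""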
from vtkmodules.web import camera, javascriptMapping, arrayTypesMapping
from paraview.web import data_writer, data_converter
from vtkmodules.web.query_data_model import DataHandler
from paraview.web.camera import update_camera
from vtkmodules.web import iteritems
from paraview import simple
from paraview import servermanager
from vtkmodules.vtkCommonCore import (
    vtkFloatArray,
    vtkIdList,
    vtkUnsignedCharArray,
    vtkTypeUInt32Array,
)
from vtkmodules.vtkCommonDataModel import vtkDataSetAttributes

import json, os, math, gzip, shutil, hashlib

# Global helper variables
encode_codes = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"


# -----------------------------------------------------------------------------
# Basic Dataset Builder
# -----------------------------------------------------------------------------
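# DataSetBuilder is the common base class. It owns the DataHandler for the
# output location, instantiates the requested camera model (spherical,
# cylindrical, cube or cube-stereo) in start(), records the view background
# color as metadata, and writes the dataset descriptor in stop(). Only MPI
# rank 0 (PartitionId == 0) is allowed to write files.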
class DataSetBuilder(object):
    def __init__(self, location, camera_data, metadata={}, sections={}):
        self.dataHandler = DataHandler(location)
        self.cameraDescription = camera_data
        self.camera = None

        for key, value in iteritems(metadata):
            self.dataHandler.addMetaData(key, value)

        for key, value in iteritems(sections):
            self.dataHandler.addSection(key, value)

        # Update the can_write flag for MPI
        self.dataHandler.can_write = (
            servermanager.vtkProcessModule.GetProcessModule().GetPartitionId() == 0
        )

    def getDataHandler(self):
        return self.dataHandler

    def getCamera(self):
        return self.camera

    def updateCamera(self, camera):
        update_camera(self.view, camera)

    def start(self, view=None):
        if view:
            # Keep track of the view
            self.view = view

            # Handle camera if any
            if self.cameraDescription:
                if self.cameraDescription["type"] == "spherical":
                    self.camera = camera.SphericalCamera(
                        self.dataHandler,
                        view.CenterOfRotation,
                        view.CameraPosition,
                        view.CameraViewUp,
                        self.cameraDescription["phi"],
                        self.cameraDescription["theta"],
                    )
                elif self.cameraDescription["type"] == "cylindrical":
                    self.camera = camera.CylindricalCamera(
                        self.dataHandler,
                        view.CenterOfRotation,
                        view.CameraPosition,
                        view.CameraViewUp,
                        self.cameraDescription["phi"],
                        self.cameraDescription["translation"],
                    )
                elif self.cameraDescription["type"] == "cube":
                    self.camera = camera.CubeCamera(
                        self.dataHandler,
                        self.cameraDescription["viewForward"],
                        self.cameraDescription["viewUp"],
                        self.cameraDescription["positions"],
                    )
                elif self.cameraDescription["type"] == "cube-stereo":
                    self.camera = camera.StereoCubeCamera(
                        self.dataHandler,
                        self.cameraDescription["viewForward"],
                        self.cameraDescription["viewUp"],
                        self.cameraDescription["positions"],
                        self.cameraDescription["eyeSeparation"],
                    )

            # Update background color
            bgColor = view.Background
            bgColorString = "rgb(%d, %d, %d)" % tuple(
                int(bgColor[i] * 255) for i in range(3)
            )

            if view.UseGradientBackground:
                bgColor2 = view.Background2
                bgColor2String = "rgb(%d, %d, %d)" % tuple(
                    int(bgColor2[i] * 255) for i in range(3)
                )
                self.dataHandler.addMetaData(
                    "backgroundColor",
                    "linear-gradient(%s,%s)" % (bgColor2String, bgColorString),
                )
            else:
                self.dataHandler.addMetaData("backgroundColor", bgColorString)

        # Update file patterns
        self.dataHandler.updateBasePattern()

    def stop(self):
        self.dataHandler.writeDataDescriptor()


# -----------------------------------------------------------------------------
# Image Dataset Builder
# -----------------------------------------------------------------------------
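# Writes one rendered image per camera position via simple.WriteImage(); the
# file extension is derived from the image MIME type (e.g. "image/jpg" -> ".jpg").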
class ImageDataSetBuilder(DataSetBuilder):
    def __init__(self, location, imageMimeType, cameraInfo, metadata={}):
        DataSetBuilder.__init__(self, location, cameraInfo, metadata)
        imageExtension = "." + imageMimeType.split("/")[1]
        self.dataHandler.registerData(
            name="image", type="blob", mimeType=imageMimeType, fileName=imageExtension
        )

    def writeImages(self):
        for cam in self.camera:
            update_camera(self.view, cam)
            simple.WriteImage(self.dataHandler.getDataAbsoluteFilePath("image"))


# -----------------------------------------------------------------------------
# Data Prober Dataset Builder
# -----------------------------------------------------------------------------
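# Probes the input on a regular grid (ResampleToImage) and dumps each selected
# point array as a raw binary ".array" file, tracking the global range of every
# field. Ghost/hidden points are written as NaN; multi-component arrays are
# reduced to their magnitude.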
class DataProberDataSetBuilder(DataSetBuilder):
    def __init__(
        self,
        input,
        location,
        sampling_dimensions,
        fields_to_keep,
        custom_probing_bounds=None,
        metadata={},
    ):
        DataSetBuilder.__init__(self, location, None, metadata)
        self.fieldsToWrite = fields_to_keep
        self.resamplerFilter = simple.ResampleToImage(Input=input)
        self.resamplerFilter.SamplingDimensions = sampling_dimensions
        if custom_probing_bounds:
            self.resamplerFilter.UseInputBounds = 0
            self.resamplerFilter.SamplingBounds = custom_probing_bounds
        else:
            self.resamplerFilter.UseInputBounds = 1

        # Register all fields
        self.dataHandler.addTypes("data-prober", "binary")
        self.DataProber = {
            "types": {},
            "dimensions": sampling_dimensions,
            "ranges": {},
            "spacing": [1, 1, 1],
        }
        for field in self.fieldsToWrite:
            self.dataHandler.registerData(
                name=field, type="array", rootFile=True, fileName="%s.array" % field
            )

    def writeData(self, time=0):
        if not self.dataHandler.can_write:
            return

        self.resamplerFilter.UpdatePipeline(time)
        imageData = self.resamplerFilter.GetClientSideObject().GetOutput()
        self.DataProber["spacing"] = imageData.GetSpacing()
        arrays = imageData.GetPointData()
        maskArray = arrays.GetArray(vtkDataSetAttributes.GhostArrayName())
        for field in self.fieldsToWrite:
            array = arrays.GetArray(field)
            if array:
                if array.GetNumberOfComponents() == 1:
                    # Push NaN instead of 0 when no value is present
                    for idx in range(maskArray.GetNumberOfTuples()):
                        if maskArray.GetValue(idx) == 2:  # Hidden point
                            array.SetValue(idx, float("NaN"))

                    with open(
                        self.dataHandler.getDataAbsoluteFilePath(field), "wb"
                    ) as f:
                        f.write(memoryview(array))

                    self.expandRange(array)
                else:
                    magarray = array.NewInstance()
                    magarray.SetNumberOfTuples(array.GetNumberOfTuples())
                    magarray.SetName(field)

                    for idx in range(magarray.GetNumberOfTuples()):
                        if maskArray.GetValue(idx) == 2:  # Hidden point
                            # Push NaN when no value is present
                            magarray.SetValue(idx, float("NaN"))
                        else:
                            entry = array.GetTuple(idx)
                            mag = self.magnitude(entry)
                            magarray.SetValue(idx, mag)

                    with open(
                        self.dataHandler.getDataAbsoluteFilePath(field), "wb"
                    ) as f:
                        f.write(memoryview(magarray))

                    self.expandRange(magarray)
            else:
                print("No array for", field)
                print(self.resamplerFilter.GetOutput())

    def magnitude(self, tuple):
        value = 0
        for item in tuple:
            value += item * item
        value = value ** 0.5
        return value

    def expandRange(self, array):
        field = array.GetName()
        self.DataProber["types"][field] = javascriptMapping[
            arrayTypesMapping[array.GetDataType()]
        ]

        if field in self.DataProber["ranges"]:
            dataRange = array.GetRange()
            if dataRange[0] < self.DataProber["ranges"][field][0]:
                self.DataProber["ranges"][field][0] = dataRange[0]
            if dataRange[1] > self.DataProber["ranges"][field][1]:
                self.DataProber["ranges"][field][1] = dataRange[1]
        else:
            self.DataProber["ranges"][field] = [
                array.GetRange()[0],
                array.GetRange()[1],
            ]

    def stop(self, compress=True):
        # Rescale spacing so that the smallest value becomes 1.0
        smallerValue = min(self.DataProber["spacing"])
        if smallerValue < 1.0:
            self.DataProber["spacing"] = tuple(
                i / smallerValue for i in self.DataProber["spacing"]
            )

        # Push metadata
        self.dataHandler.addSection("DataProber", self.DataProber)

        # Write metadata
        DataSetBuilder.stop(self)

        if compress:
            for root, dirs, files in os.walk(self.dataHandler.getBasePath()):
                print("Compress", root)
                for name in files:
                    if ".array" in name and ".gz" not in name:
                        with open(os.path.join(root, name), "rb") as f_in:
                            with gzip.open(
                                os.path.join(root, name + ".gz"), "wb"
                            ) as f_out:
                                shutil.copyfileobj(f_in, f_out)
                        os.remove(os.path.join(root, name))


# -----------------------------------------------------------------------------
# Float Image with Layer Dataset Builder
# -----------------------------------------------------------------------------
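# Exports per-layer float images through data_writer.ScalarRenderer: one scalar
# array per (layer, field) pair for every camera position, plus optional
# lighting and mesh arrays when the active layer or the timestep changes.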
class LayerDataSetBuilder(DataSetBuilder):
    def __init__(self, input, location, cameraInfo, imageSize=[500, 500], metadata={}):
        DataSetBuilder.__init__(self, location, cameraInfo, metadata)
        self.dataRenderer = data_writer.ScalarRenderer(
            isWriter=self.dataHandler.can_write
        )
        self.view = self.dataRenderer.getView()
        self.view.ViewSize = imageSize
        self.floatImage = {"dimensions": imageSize, "layers": [], "ranges": {}}
        self.layerMap = {}
        self.input = input
        self.activeLayer = None
        self.activeField = None
        self.layerChanged = False
        self.lastTime = -1

        # Update data type
        self.dataHandler.addTypes("float-image")

    def getView(self):
        return self.view

    def setActiveLayer(self, layer, field, hasMesh=False, activeSource=None):
        if activeSource:
            self.activeSource = activeSource
        else:
            self.activeSource = self.input

        needDataRegistration = False
        if layer not in self.layerMap:
            layerObj = {
                "name": layer,
                "array": field,
                "arrays": [field],
                "active": True,
                "type": "Float32Array",
                "hasMesh": hasMesh,
            }
            self.layerMap[layer] = layerObj
            self.floatImage["layers"].append(layerObj)
            needDataRegistration = True

            # Register layer lighting
            self.dataHandler.registerData(
                name="%s__light" % layer,
                type="array",
                rootFile=True,
                fileName="%s__light.array" % layer,
                categories=["%s__light" % layer],
            )

            # Register layer mesh
            if hasMesh:
                self.dataHandler.registerData(
                    name="%s__mesh" % layer,
                    type="array",
                    rootFile=True,
                    fileName="%s__mesh.array" % layer,
                    categories=["%s__mesh" % layer],
                )
        elif field not in self.layerMap[layer]["arrays"]:
            self.layerMap[layer]["arrays"].append(field)
            needDataRegistration = True

        # Keep track of the active data
        if self.activeLayer != layer:
            self.layerChanged = True
        self.activeLayer = layer
        self.activeField = field

        if needDataRegistration:
            self.dataHandler.registerData(
                name="%s_%s" % (layer, field),
                type="array",
                rootFile=True,
                fileName="%s_%s.array" % (layer, field),
                categories=["%s_%s" % (layer, field)],
            )

    def writeLayerData(self, time=0):
        dataRange = [0, 1]
        self.activeSource.UpdatePipeline(time)

        if self.activeField and self.activeLayer:
            if self.layerChanged or self.lastTime != time:
                self.layerChanged = False
                self.lastTime = time

                # Capture lighting information
                for camPos in self.getCamera():
                    self.view.CameraFocalPoint = camPos["focalPoint"]
                    self.view.CameraPosition = camPos["position"]
                    self.view.CameraViewUp = camPos["viewUp"]
                    self.dataRenderer.writeLightArray(
                        self.dataHandler.getDataAbsoluteFilePath(
                            "%s__light" % self.activeLayer
                        ),
                        self.activeSource,
                    )

                # Capture mesh information
                if self.layerMap[self.activeLayer]["hasMesh"]:
                    for camPos in self.getCamera():
                        self.view.CameraFocalPoint = camPos["focalPoint"]
                        self.view.CameraPosition = camPos["position"]
                        self.view.CameraViewUp = camPos["viewUp"]
                        self.dataRenderer.writeMeshArray(
                            self.dataHandler.getDataAbsoluteFilePath(
                                "%s__mesh" % self.activeLayer
                            ),
                            self.activeSource,
                        )

            for camPos in self.getCamera():
                self.view.CameraFocalPoint = camPos["focalPoint"]
                self.view.CameraPosition = camPos["position"]
                self.view.CameraViewUp = camPos["viewUp"]
                dataName = "%s_%s" % (self.activeLayer, self.activeField)
                dataRange = self.dataRenderer.writeArray(
                    self.dataHandler.getDataAbsoluteFilePath(dataName),
                    self.activeSource,
                    self.activeField,
                )

            if self.activeField not in self.floatImage["ranges"]:
                self.floatImage["ranges"][self.activeField] = [
                    dataRange[0],
                    dataRange[1],
                ]
            else:
                # Expand the ranges
                if dataRange[0] < self.floatImage["ranges"][self.activeField][0]:
                    self.floatImage["ranges"][self.activeField][0] = dataRange[0]
                if dataRange[1] > self.floatImage["ranges"][self.activeField][1]:
                    self.floatImage["ranges"][self.activeField][1] = dataRange[1]

    def start(self):
        DataSetBuilder.start(self, self.view)

    def stop(self, compress=True):
        if not self.dataHandler.can_write:
            return

        # Push metadata
        self.dataHandler.addSection("FloatImage", self.floatImage)

        # Write metadata
        DataSetBuilder.stop(self)

        if compress:
            for root, dirs, files in os.walk(self.dataHandler.getBasePath()):
                print("Compress", root)
                for name in files:
                    if ".array" in name and ".gz" not in name:
                        with open(os.path.join(root, name), "rb") as f_in:
                            with gzip.open(
                                os.path.join(root, name + ".gz"), "wb"
                            ) as f_out:
                                shutil.copyfileobj(f_in, f_out)
                        os.remove(os.path.join(root, name))


# -----------------------------------------------------------------------------
# Composite Dataset Builder
# -----------------------------------------------------------------------------
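# Renders every scene item separately for each camera position (depth buffer,
# lighting intensity, normals and scalar fields), then lets
# data_converter.ConvertCompositeDataToSortedStack turn the per-layer images
# into a sorted-composite dataset in stop().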
class CompositeDataSetBuilder(DataSetBuilder):
    def __init__(
        self, location, sceneConfig, cameraInfo, metadata={}, sections={}, view=None
    ):
        DataSetBuilder.__init__(self, location, cameraInfo, metadata, sections)

        if view:
            self.view = view
        else:
            self.view = simple.CreateView("RenderView")

        self.view.ViewSize = sceneConfig["size"]
        self.view.CenterAxesVisibility = 0
        self.view.OrientationAxesVisibility = 0
        self.view.UpdatePropertyInformation()
        if "camera" in sceneConfig and "CameraFocalPoint" in sceneConfig["camera"]:
            self.view.CenterOfRotation = sceneConfig["camera"]["CameraFocalPoint"]

        # Initialize camera
        for key, value in iteritems(sceneConfig["camera"]):
            self.view.GetProperty(key).SetData(value)

        # Create a representation for all scene sources
        self.config = sceneConfig
        self.representations = []
        for data in self.config["scene"]:
            rep = simple.Show(data["source"], self.view)
            self.representations.append(rep)
            if "representation" in data:
                for key in data["representation"]:
                    rep.GetProperty(key).SetData(data["representation"][key])

        # Add directory path
        self.dataHandler.registerData(
            name="directory", rootFile=True, fileName="file.txt", categories=["trash"]
        )

    def start(self):
        DataSetBuilder.start(self, self.view)

    def stop(self, compress=True, clean=True):
        DataSetBuilder.stop(self)

        if not self.dataHandler.can_write:
            return

        # Make the config serializable
        for item in self.config["scene"]:
            del item["source"]

        # Write the scene to disk
        with open(
            os.path.join(self.dataHandler.getBasePath(), "config.json"), "w"
        ) as f:
            f.write(json.dumps(self.config))

        dataConverter = data_converter.ConvertCompositeDataToSortedStack(
            self.dataHandler.getBasePath()
        )
        dataConverter.convert()

        # Remove tmp files
        os.remove(os.path.join(self.dataHandler.getBasePath(), "index.json"))
        os.remove(os.path.join(self.dataHandler.getBasePath(), "config.json"))

        # Composite pipeline meta description
        compositePipeline = {
            "default_pipeline": "",
            "layers": [],
            "fields": {},
            "layer_fields": {},
            "pipeline": [],
        }
        rootItems = {}
        fieldNameMapping = {}

        # Clean scene in config and gather ranges
        dataRanges = {}
        layerIdx = 0
        for layer in self.config["scene"]:
            # Create group node if any
            if "parent" in layer and layer["parent"] not in rootItems:
                rootItems[layer["parent"]] = {
                    "name": layer["parent"],
                    "ids": [],
                    "children": [],
                }
                compositePipeline["pipeline"].append(rootItems[layer["parent"]])

            # Create layer entry
            layerCode = encode_codes[layerIdx]
            layerItem = {"name": layer["name"], "ids": [layerCode]}
            compositePipeline["layers"].append(layerCode)
            compositePipeline["layer_fields"][layerCode] = []
            compositePipeline["default_pipeline"] += layerCode

            # Register layer entry in pipeline
            if "parent" in layer:
                rootItems[layer["parent"]]["children"].append(layerItem)
                rootItems[layer["parent"]]["ids"].append(layerCode)
            else:
                compositePipeline["pipeline"].append(layerItem)

            # Handle color / field
            colorByList = []
            for color in layer["colors"]:
                # Find color code
                if color not in fieldNameMapping:
                    colorCode = encode_codes[len(fieldNameMapping)]
                    fieldNameMapping[color] = colorCode
                    compositePipeline["fields"][colorCode] = color
                else:
                    colorCode = fieldNameMapping[color]

                # Register color code
                compositePipeline["layer_fields"][layerCode].append(colorCode)
                if len(colorByList) == 0:
                    compositePipeline["default_pipeline"] += colorCode

                values = None
                if "constant" in layer["colors"][color]:
                    value = layer["colors"][color]["constant"]
                    values = [value, value]
                    colorByList.append({"name": color, "type": "const", "value": value})
                elif "range" in layer["colors"][color]:
                    values = layer["colors"][color]["range"]
                    colorByList.append({"name": color, "type": "field"})

                if values:
                    if color not in dataRanges:
                        dataRanges[color] = values
                    else:
                        dataRanges[color][0] = min(
                            dataRanges[color][0], values[0], values[1]
                        )
                        dataRanges[color][1] = max(
                            dataRanges[color][1], values[0], values[1]
                        )

            layer["colorBy"] = colorByList
            del layer["colors"]
            layerIdx += 1

        sortedCompositeSection = {
            "dimensions": self.config["size"],
            "pipeline": self.config["scene"],
            "ranges": dataRanges,
            "layers": len(self.config["scene"]),
            "light": self.config["light"],
        }
        self.dataHandler.addSection("SortedComposite", sortedCompositeSection)
        self.dataHandler.addSection("CompositePipeline", compositePipeline)
        self.dataHandler.addTypes("sorted-composite", "multi-color-by")

        self.dataHandler.removeData("directory")
        for dataToRegister in dataConverter.listData():
            self.dataHandler.registerData(**dataToRegister)

        self.dataHandler.writeDataDescriptor()

        if clean:
            for root, dirs, files in os.walk(self.dataHandler.getBasePath()):
                print("Clean", root)
                for name in files:
                    if name in ["camera.json"]:
                        os.remove(os.path.join(root, name))

        if compress:
            for root, dirs, files in os.walk(self.dataHandler.getBasePath()):
                print("Compress", root)
                for name in files:
                    if (".float32" in name or ".uint8" in name) and ".gz" not in name:
                        with open(os.path.join(root, name), "rb") as f_in:
                            with gzip.open(
                                os.path.join(root, name + ".gz"), "wb"
                            ) as f_out:
                                shutil.copyfileobj(f_in, f_out)
                        os.remove(os.path.join(root, name))

    def writeData(self):
        composite_size = len(self.representations)

        self.view.UpdatePropertyInformation()
        self.view.Background = [0, 0, 0]
        imageSize = self.view.ViewSize[0] * self.view.ViewSize[1]

        # Generate the heavy data
        for camPos in self.getCamera():
            self.view.CameraFocalPoint = camPos["focalPoint"]
            self.view.CameraPosition = camPos["position"]
            self.view.CameraViewUp = camPos["viewUp"]

            # Show all representations
            for compositeIdx in range(composite_size):
                rep = self.representations[compositeIdx]
                rep.Visibility = 1

            # Fix camera bounds
            self.view.LockBounds = 0
            simple.Render(self.view)
            self.view.LockBounds = 1

            # Update destination directory
            dest_path = os.path.dirname(
                self.dataHandler.getDataAbsoluteFilePath("directory")
            )

            # Write camera information
            if self.dataHandler.can_write:
                with open(os.path.join(dest_path, "camera.json"), "w") as f:
                    f.write(json.dumps(camPos))

            # Hide all representations
            for compositeIdx in range(composite_size):
                rep = self.representations[compositeIdx]
                rep.Visibility = 0

            # Show only the active representation and
            # extract images for each field
            for compositeIdx in range(composite_size):
                rep = self.representations[compositeIdx]
                if compositeIdx > 0:
                    self.representations[compositeIdx - 1].Visibility = 0
                rep.Visibility = 1

                # Capture Z
                simple.Render()
                zBuffer = self.view.CaptureDepthBuffer()
                with open(
                    os.path.join(dest_path, "depth_%d.float32" % compositeIdx), "wb"
                ) as f:
                    f.write(memoryview(zBuffer))

                # Prevent color interference
                rep.DiffuseColor = [1, 1, 1]

                # Handle light
                for lightType in self.config["light"]:
                    if lightType == "intensity":
                        rep.AmbientColor = [1, 1, 1]
                        rep.SpecularColor = [1, 1, 1]

                        self.view.StartCaptureLuminance()
                        image = self.view.CaptureWindow(1)
                        imagescalars = image.GetPointData().GetScalars()
                        self.view.StopCaptureLuminance()

                        # Extract specular information
                        specularOffset = 1  # [diffuse, specular, ?]
                        imageSize = imagescalars.GetNumberOfTuples()
                        specularComponent = vtkUnsignedCharArray()
                        specularComponent.SetNumberOfComponents(1)
                        specularComponent.SetNumberOfTuples(imageSize)
                        for idx in range(imageSize):
                            specularComponent.SetValue(
                                idx, imagescalars.GetValue(idx * 3 + specularOffset)
                            )

                        with open(
                            os.path.join(
                                dest_path, "intensity_%d.uint8" % compositeIdx
                            ),
                            "wb",
                        ) as f:
                            f.write(memoryview(specularComponent))

                        # Free memory
                        image.UnRegister(None)

                    if lightType == "normal":
                        if rep.Representation in [
                            "Point Gaussian",
                            "Points",
                            "Outline",
                            "Wireframe",
                        ]:
                            uniqNormal = [
                                (camPos["position"][i] - camPos["focalPoint"][i])
                                for i in range(3)
                            ]
                            tmpNormalArray = vtkFloatArray()
                            tmpNormalArray.SetNumberOfComponents(1)
                            tmpNormalArray.SetNumberOfTuples(imageSize)

                            for comp in range(3):
                                tmpNormalArray.FillComponent(0, uniqNormal[comp])
                                with open(
                                    os.path.join(
                                        dest_path,
                                        "normal_%d_%d.float32" % (compositeIdx, comp),
                                    ),
                                    "wb",
                                ) as f:
                                    f.write(memoryview(tmpNormalArray))
                        else:
                            for comp in range(3):
                                # Configure view to handle POINT_DATA / CELL_DATA
                                self.view.DrawCells = 0
                                self.view.ArrayNameToDraw = "Normals"
                                self.view.ArrayComponentToDraw = comp
                                self.view.ScalarRange = [-1.0, 1.0]
                                self.view.StartCaptureValues()
                                image = self.view.CaptureWindow(1)
                                imagescalars = image.GetPointData().GetScalars()
                                self.view.StopCaptureValues()

                                # Convert RGB => Float => Write
                                floatArray = data_converter.convertRGBArrayToFloatArray(
                                    imagescalars, [-1.0, 1.0]
                                )
                                with open(
                                    os.path.join(
                                        dest_path,
                                        "normal_%d_%d.float32" % (compositeIdx, comp),
                                    ),
                                    "wb",
                                ) as f:
                                    f.write(memoryview(floatArray))

                                # Free memory
                                image.UnRegister(None)

                # Handle color by
                for fieldName, fieldConfig in iteritems(
                    self.config["scene"][compositeIdx]["colors"]
                ):
                    if "constant" in fieldConfig:
                        # Skip, nothing to render
                        continue

                    # Configure view to handle POINT_DATA / CELL_DATA
                    if fieldConfig["location"] == "POINT_DATA":
                        self.view.DrawCells = 0
                    else:
                        self.view.DrawCells = 1

                    self.view.ArrayNameToDraw = fieldName
                    self.view.ArrayComponentToDraw = 0
                    self.view.ScalarRange = fieldConfig["range"]

                    self.view.StartCaptureValues()
                    image = self.view.CaptureWindow(1)
                    imagescalars = image.GetPointData().GetScalars()
                    self.view.StopCaptureValues()

                    floatArray = data_converter.convertRGBArrayToFloatArray(
                        imagescalars, fieldConfig["range"]
                    )
                    with open(
                        os.path.join(
                            dest_path, "%d_%s.float32" % (compositeIdx, fieldName)
                        ),
                        "wb",
                    ) as f:
                        f.write(memoryview(floatArray))
                    self.dataHandler.registerData(
                        name="%d_%s" % (compositeIdx, fieldName),
                        fileName="/%d_%s.float32" % (compositeIdx, fieldName),
                        type="array",
                        categories=["%d_%s" % (compositeIdx, fieldName)],
                    )

                    # Free memory
                    image.UnRegister(None)


# -----------------------------------------------------------------------------
# VTKGeometryDataSetBuilder Dataset Builder
# -----------------------------------------------------------------------------
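# Helper for VTKGeometryDataSetBuilder: copies a vtkCellArray connectivity
# buffer into a vtkTypeUInt32Array, writes it as data/<md5>.Uint32Array and
# records the relative path under the given cell name.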
def writeCellArray(dataHandler, currentData, cellName, inputCellArray):
    nbValues = inputCellArray.GetNumberOfTuples()
    if nbValues == 0:
        return

    outputCells = vtkTypeUInt32Array()
    outputCells.SetNumberOfTuples(nbValues)

    for valueIdx in range(nbValues):
        outputCells.SetValue(valueIdx, inputCellArray.GetValue(valueIdx))

    iBuffer = memoryview(outputCells)
    iMd5 = hashlib.md5(iBuffer).hexdigest()
    iPath = os.path.join(dataHandler.getBasePath(), "data", "%s.Uint32Array" % iMd5)
    currentData[cellName] = "data/%s.Uint32Array" % iMd5
    with open(iPath, "wb") as f:
        f.write(iBuffer)
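

# Exports the extracted surface of each scene item as raw typed-array buffers
# (points, verts/lines/polys/strips, fields) stored by MD5 hash under "data/",
# together with a scene.json description per timestep.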
class VTKGeometryDataSetBuilder(DataSetBuilder):
    def __init__(self, location, sceneConfig, metadata={}, sections={}):
        DataSetBuilder.__init__(self, location, None, metadata, sections)

        # Update data type
        self.dataHandler.addTypes("vtk-geometry")

        # Create a representation for all scene sources
        self.config = sceneConfig

        # Processing pipeline
        self.surfaceExtract = None

        # Add directory path
        self.dataHandler.registerData(
            priority=0, name="scene", rootFile=True, fileName="scene.json", type="json"
        )

        # Create directory containers
        dataPath = os.path.join(location, "data")
        for p in [dataPath]:
            if not os.path.exists(p):
                os.makedirs(p)

        # Create metadata structure
        colorToCodeMap = {}
        parentNodes = {}
        pipelineMeta = {"layers": [], "pipeline": [], "layer_fields": {}, "fields": {}}
        geometryMeta = {
            "ranges": {},
            "layer_map": {},
        }
        self.ranges = geometryMeta["ranges"]
        for item in sceneConfig["scene"]:
            # Handle layer
            layerCode = encode_codes[len(pipelineMeta["layers"])]
            pipelineMeta["layers"].append(layerCode)
            geometryMeta["layer_map"][layerCode] = item["name"]

            # Handle colors
            pipelineMeta["layer_fields"][layerCode] = []
            for fieldName in item["colors"]:
                colorCode = None
                if fieldName in colorToCodeMap:
                    colorCode = colorToCodeMap[fieldName]
                else:
                    colorCode = encode_codes[len(colorToCodeMap)]
                    colorToCodeMap[fieldName] = colorCode
                    geometryMeta["ranges"][fieldName] = [
                        1,
                        -1,
                    ]  # FIXME we don't know the range
                pipelineMeta["layer_fields"][layerCode].append(colorCode)
                pipelineMeta["fields"][colorCode] = fieldName

            # Handle pipeline
            if "parent" in item:
                # Need to handle hierarchy
                if item["parent"] in parentNodes:
                    # Fill children
                    rootNode = parentNodes[item["parent"]]
                    rootNode["ids"].append(layerCode)
                    rootNode["children"].append(
                        {"name": item["name"], "ids": [layerCode]}
                    )
                else:
                    # Create root + register
                    rootNode = {
                        "name": item["parent"],
                        "ids": [layerCode],
                        "children": [{"name": item["name"], "ids": [layerCode]}],
                    }
                    parentNodes[item["parent"]] = rootNode
                    pipelineMeta["pipeline"].append(rootNode)
            else:
                # Add item info as a new pipeline node
                pipelineMeta["pipeline"].append(
                    {"name": item["name"], "ids": [layerCode]}
                )

        # Register metadata to be written in index.json
        self.dataHandler.addSection("Geometry", geometryMeta)
        self.dataHandler.addSection("CompositePipeline", pipelineMeta)

    def writeData(self, time=0):
        if not self.dataHandler.can_write:
            return

        currentScene = []
        for data in self.config["scene"]:
            currentData = {"name": data["name"], "fields": {}, "cells": {}}
            currentScene.append(currentData)
            if self.surfaceExtract:
                self.merge.Input = data["source"]
            else:
                self.merge = simple.MergeBlocks(Input=data["source"], MergePoints=0)
                self.surfaceExtract = simple.ExtractSurface(Input=self.merge)

            # Extract surface
            self.surfaceExtract.UpdatePipeline(time)
            ds = self.surfaceExtract.SMProxy.GetClientSideObject().GetOutputDataObject(
                0
            )
            originalDS = (
                data["source"].SMProxy.GetClientSideObject().GetOutputDataObject(0)
            )

            originalPoints = ds.GetPoints()

            # Points
            points = vtkFloatArray()
            nbPoints = originalPoints.GetNumberOfPoints()
            points.SetNumberOfComponents(3)
            points.SetNumberOfTuples(nbPoints)
            for idx in range(nbPoints):
                coord = originalPoints.GetPoint(idx)
                points.SetTuple3(idx, coord[0], coord[1], coord[2])

            pBuffer = memoryview(points)
            pMd5 = hashlib.md5(pBuffer).hexdigest()
            pPath = os.path.join(
                self.dataHandler.getBasePath(), "data", "%s.Float32Array" % pMd5
            )
            currentData["points"] = "data/%s.Float32Array" % pMd5
            with open(pPath, "wb") as f:
                f.write(pBuffer)

            # Handle cells
            writeCellArray(
                self.dataHandler, currentData["cells"], "verts", ds.GetVerts().GetData()
            )
            writeCellArray(
                self.dataHandler, currentData["cells"], "lines", ds.GetLines().GetData()
            )
            writeCellArray(
                self.dataHandler, currentData["cells"], "polys", ds.GetPolys().GetData()
            )
            writeCellArray(
                self.dataHandler,
                currentData["cells"],
                "strips",
                ds.GetStrips().GetData(),
            )

            # Fields
            for fieldName, fieldInfo in iteritems(data["colors"]):
                array = None
                if "constant" in fieldInfo:
                    currentData["fields"][fieldName] = fieldInfo
                    continue
                elif "POINT_DATA" in fieldInfo["location"]:
                    array = ds.GetPointData().GetArray(fieldName)
                elif "CELL_DATA" in fieldInfo["location"]:
                    array = ds.GetCellData().GetArray(fieldName)

                jsType = javascriptMapping[arrayTypesMapping[array.GetDataType()]]
                arrayRange = array.GetRange(-1)
                tupleSize = array.GetNumberOfComponents()
                arraySize = array.GetNumberOfTuples()
                if tupleSize == 1:
                    outputField = array
                else:
                    # compute magnitude
                    outputField = array.NewInstance()
                    outputField.SetNumberOfTuples(arraySize)
                    tupleIdxs = range(tupleSize)
                    for i in range(arraySize):
                        magnitude = 0
                        for j in tupleIdxs:
                            magnitude += math.pow(array.GetValue(i * tupleSize + j), 2)
                        outputField.SetValue(i, math.sqrt(magnitude))

                fBuffer = memoryview(outputField)
                fMd5 = hashlib.md5(fBuffer).hexdigest()
                fPath = os.path.join(
                    self.dataHandler.getBasePath(),
                    "data",
                    "%s_%s.%s" % (fieldName, fMd5, jsType),
                )
                with open(fPath, "wb") as f:
                    f.write(fBuffer)

                currentRange = self.ranges[fieldName]
                if currentRange[1] < currentRange[0]:
                    currentRange[0] = arrayRange[0]
                    currentRange[1] = arrayRange[1]
                else:
                    currentRange[0] = (
                        arrayRange[0]
                        if arrayRange[0] < currentRange[0]
                        else currentRange[0]
                    )
                    currentRange[1] = (
                        arrayRange[1]
                        if arrayRange[1] > currentRange[1]
                        else currentRange[1]
                    )

                currentData["fields"][fieldName] = {
                    "array": "data/%s_%s.%s" % (fieldName, fMd5, jsType),
                    "location": fieldInfo["location"],
                    "range": outputField.GetRange(),
                }

        # Write scene
        with open(self.dataHandler.getDataAbsoluteFilePath("scene"), "w") as f:
            f.write(json.dumps(currentScene, indent=4))

    def stop(self, compress=True, clean=True):
        if not self.dataHandler.can_write:
            return

        DataSetBuilder.stop(self)

        if compress:
            for dirName in ["data"]:
                for root, dirs, files in os.walk(
                    os.path.join(self.dataHandler.getBasePath(), dirName)
                ):
                    print("Compress", root)
                    for name in files:
                        if "Array" in name and ".gz" not in name:
                            with open(os.path.join(root, name), "rb") as f_in:
                                with gzip.open(
                                    os.path.join(root, name + ".gz"), "wb"
                                ) as f_out:
                                    shutil.copyfileobj(f_in, f_out)
                            os.remove(os.path.join(root, name))


# -----------------------------------------------------------------------------
# GeometryDataSetBuilder Dataset Builder
# -----------------------------------------------------------------------------
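# Older geometry exporter: triangulates polygons (quads are split into two
# triangles, other cell sizes are skipped), stores points/index/fields buffers
# by MD5 hash in separate directories, and tracks the largest per-object sizes
# across timesteps.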
class GeometryDataSetBuilder(DataSetBuilder):
    def __init__(self, location, sceneConfig, metadata={}, sections={}):
        DataSetBuilder.__init__(self, location, None, metadata, sections)

        # Update data type
        self.dataHandler.addTypes("geometry")

        # Create a representation for all scene sources
        self.config = sceneConfig

        # Processing pipeline
        self.surfaceExtract = None

        # Add directory path
        self.dataHandler.registerData(
            priority=0, name="scene", rootFile=True, fileName="scene.json", type="json"
        )

        # Create directory containers
        pointsPath = os.path.join(location, "points")
        polyPath = os.path.join(location, "index")
        colorPath = os.path.join(location, "fields")
        for p in [pointsPath, polyPath, colorPath]:
            if not os.path.exists(p):
                os.makedirs(p)

        # Create metadata structure
        colorToCodeMap = {}
        parentNodes = {}
        pipelineMeta = {"layers": [], "pipeline": [], "layer_fields": {}, "fields": {}}
        geometryMeta = {"ranges": {}, "layer_map": {}, "object_size": {}}
        self.objSize = geometryMeta["object_size"]
        for item in sceneConfig["scene"]:
            # Handle layer
            layerCode = encode_codes[len(pipelineMeta["layers"])]
            pipelineMeta["layers"].append(layerCode)
            geometryMeta["layer_map"][layerCode] = item["name"]
            geometryMeta["object_size"][item["name"]] = {"points": 0, "index": 0}

            # Handle colors
            pipelineMeta["layer_fields"][layerCode] = []
            for fieldName in item["colors"]:
                colorCode = None
                if fieldName in colorToCodeMap:
                    colorCode = colorToCodeMap[fieldName]
                else:
                    colorCode = encode_codes[len(colorToCodeMap)]
                    colorToCodeMap[fieldName] = colorCode
                    geometryMeta["ranges"][fieldName] = [
                        0,
                        1,
                    ]  # FIXME we don't know the range
                pipelineMeta["layer_fields"][layerCode].append(colorCode)
                pipelineMeta["fields"][colorCode] = fieldName

            # Handle pipeline
            if "parent" in item:
                # Need to handle hierarchy
                if item["parent"] in parentNodes:
                    # Fill children
                    rootNode = parentNodes[item["parent"]]
                    rootNode["ids"].append(layerCode)
                    rootNode["children"].append(
                        {"name": item["name"], "ids": [layerCode]}
                    )
                else:
                    # Create root + register
                    rootNode = {
                        "name": item["parent"],
                        "ids": [layerCode],
                        "children": [{"name": item["name"], "ids": [layerCode]}],
                    }
                    parentNodes[item["parent"]] = rootNode
                    pipelineMeta["pipeline"].append(rootNode)
            else:
                # Add item info as a new pipeline node
                pipelineMeta["pipeline"].append(
                    {"name": item["name"], "ids": [layerCode]}
                )

        # Register metadata to be written in index.json
        self.dataHandler.addSection("Geometry", geometryMeta)
        self.dataHandler.addSection("CompositePipeline", pipelineMeta)

    def writeData(self, time=0):
        if not self.dataHandler.can_write:
            return

        currentScene = []
        for data in self.config["scene"]:
            currentData = {"name": data["name"], "fields": {}}
            currentScene.append(currentData)
            if self.surfaceExtract:
                self.merge.Input = data["source"]
            else:
                self.merge = simple.MergeBlocks(Input=data["source"], MergePoints=0)
                self.surfaceExtract = simple.ExtractSurface(Input=self.merge)

            # Extract surface
            self.surfaceExtract.UpdatePipeline(time)
            ds = self.surfaceExtract.SMProxy.GetClientSideObject().GetOutputDataObject(
                0
            )
            originalDS = (
                data["source"].SMProxy.GetClientSideObject().GetOutputDataObject(0)
            )

            originalPoints = ds.GetPoints()

            # Points
            points = vtkFloatArray()
            nbPoints = originalPoints.GetNumberOfPoints()
            points.SetNumberOfComponents(3)
            points.SetNumberOfTuples(nbPoints)
            for idx in range(nbPoints):
                coord = originalPoints.GetPoint(idx)
                points.SetTuple3(idx, coord[0], coord[1], coord[2])

            pBuffer = memoryview(points)
            pMd5 = hashlib.md5(pBuffer).hexdigest()
            pPath = os.path.join(
                self.dataHandler.getBasePath(), "points", "%s.Float32Array" % pMd5
            )
            currentData["points"] = "points/%s.Float32Array" % pMd5
            with open(pPath, "wb") as f:
                f.write(pBuffer)

            # Polys
            poly = ds.GetPolys()
            nbCells = poly.GetNumberOfCells()
            cellLocation = 0
            idList = vtkIdList()
            topo = vtkTypeUInt32Array()
            topo.Allocate(poly.GetData().GetNumberOfTuples())

            for cellIdx in range(nbCells):
                poly.GetCell(cellLocation, idList)
                cellSize = idList.GetNumberOfIds()
                cellLocation += cellSize + 1

                if cellSize == 3:
                    topo.InsertNextValue(idList.GetId(0))
                    topo.InsertNextValue(idList.GetId(1))
                    topo.InsertNextValue(idList.GetId(2))
                elif cellSize == 4:
                    topo.InsertNextValue(idList.GetId(0))
                    topo.InsertNextValue(idList.GetId(1))
                    topo.InsertNextValue(idList.GetId(3))
                    topo.InsertNextValue(idList.GetId(1))
                    topo.InsertNextValue(idList.GetId(2))
                    topo.InsertNextValue(idList.GetId(3))
                else:
                    print("Cell size of", cellSize, "not supported")

            iBuffer = memoryview(topo)
            iMd5 = hashlib.md5(iBuffer).hexdigest()
            iPath = os.path.join(
                self.dataHandler.getBasePath(), "index", "%s.Uint32Array" % iMd5
            )
            currentData["index"] = "index/%s.Uint32Array" % iMd5
            with open(iPath, "wb") as f:
                f.write(iBuffer)

            # Grow object size
            self.objSize[data["name"]]["points"] = max(
                self.objSize[data["name"]]["points"], nbPoints
            )
            self.objSize[data["name"]]["index"] = max(
                self.objSize[data["name"]]["index"], topo.GetNumberOfTuples()
            )

            # Colors / FIXME
            for fieldName, fieldInfo in iteritems(data["colors"]):
                array = ds.GetPointData().GetArray(fieldName)
                tupleSize = array.GetNumberOfComponents()
                arraySize = array.GetNumberOfTuples()
                outputField = vtkFloatArray()
                outputField.SetNumberOfTuples(arraySize)
                if tupleSize == 1:
                    for i in range(arraySize):
                        outputField.SetValue(i, array.GetValue(i))
                else:
                    # compute magnitude
                    tupleIdxs = range(tupleSize)
                    for i in range(arraySize):
                        magnitude = 0
                        for j in tupleIdxs:
                            magnitude += math.pow(array.GetValue(i * tupleSize + j), 2)
                        outputField.SetValue(i, math.sqrt(magnitude))

                fBuffer = memoryview(outputField)
                fMd5 = hashlib.md5(fBuffer).hexdigest()
                fPath = os.path.join(
                    self.dataHandler.getBasePath(),
                    "fields",
                    "%s_%s.Float32Array" % (fieldName, fMd5),
                )
                with open(fPath, "wb") as f:
                    f.write(fBuffer)

                currentData["fields"][fieldName] = "fields/%s_%s.Float32Array" % (
                    fieldName,
                    fMd5,
                )

        # Write scene
        with open(self.dataHandler.getDataAbsoluteFilePath("scene"), "w") as f:
            f.write(json.dumps(currentScene, indent=2))

    def stop(self, compress=True, clean=True):
        if not self.dataHandler.can_write:
            return

        DataSetBuilder.stop(self)

        if compress:
            for dirName in ["fields", "index", "points"]:
                for root, dirs, files in os.walk(
                    os.path.join(self.dataHandler.getBasePath(), dirName)
                ):
                    print("Compress", root)
                    for name in files:
                        if "Array" in name and ".gz" not in name:
                            with open(os.path.join(root, name), "rb") as f_in:
                                with gzip.open(
                                    os.path.join(root, name + ".gz"), "wb"
                                ) as f_out:
                                    shutil.copyfileobj(f_in, f_out)
                            os.remove(os.path.join(root, name))
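

# -----------------------------------------------------------------------------
# Usage sketch (illustrative only)
# -----------------------------------------------------------------------------
# A minimal, hypothetical example of driving ImageDataSetBuilder from a
# pvpython session. The source, output location and camera sampling below are
# placeholders, not part of this module; any ParaView pipeline and render view
# can be substituted.
#
#   from paraview import simple
#
#   cone = simple.Cone()                      # hypothetical data source
#   simple.Show(cone)
#   view = simple.Render()
#
#   builder = ImageDataSetBuilder(
#       "/tmp/cone-dataset",                  # hypothetical output location
#       "image/jpg",
#       {"type": "spherical", "phi": list(range(0, 360, 30)), "theta": [-30, 0, 30]},
#   )
#   builder.start(view)
#   builder.writeImages()
#   builder.stop()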