/*
---------------------------------------------------------------------------
Open Asset Import Library (assimp)
---------------------------------------------------------------------------

Copyright (c) 2006-2017, assimp team


All rights reserved.

Redistribution and use of this software in source and binary forms,
with or without modification, are permitted provided that the following
conditions are met:

* Redistributions of source code must retain the above
  copyright notice, this list of conditions and the
  following disclaimer.

* Redistributions in binary form must reproduce the above
  copyright notice, this list of conditions and the
  following disclaimer in the documentation and/or other
  materials provided with the distribution.

* Neither the name of the assimp team, nor the names of its
  contributors may be used to endorse or promote products
  derived from this software without specific prior
  written permission of the assimp team.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
---------------------------------------------------------------------------
*/

/** @file Implementation of the NFF importer class */


#ifndef ASSIMP_BUILD_NO_NFF_IMPORTER

// internal headers
#include "NFFLoader.h"
#include "ParsingUtils.h"
#include "StandardShapes.h"
#include "qnan.h"
#include "fast_atof.h"
#include "RemoveComments.h"
#include <assimp/IOSystem.hpp>
#include <assimp/DefaultLogger.hpp>
#include <assimp/scene.h>
#include <assimp/importerdesc.h>
#include <memory>


using namespace Assimp;

static const aiImporterDesc desc = {
    "Neutral File Format Importer",
    "",
    "",
    "",
    aiImporterFlags_SupportBinaryFlavour,
    0,
    0,
    0,
    0,
    "enff nff"
};

// ------------------------------------------------------------------------------------------------
// Constructor to be privately used by Importer
NFFImporter::NFFImporter()
{}

// ------------------------------------------------------------------------------------------------
// Destructor, private as well
NFFImporter::~NFFImporter()
{}

// ------------------------------------------------------------------------------------------------
// Returns whether the class can handle the format of the given file.
bool NFFImporter::CanRead( const std::string& pFile, IOSystem* /*pIOHandler*/, bool /*checkSig*/) const
{
    return SimpleExtensionCheck(pFile,"nff","enff");
}

// ------------------------------------------------------------------------------------------------
// Get the list of all supported file extensions
const aiImporterDesc* NFFImporter::GetInfo () const
{
    return &desc;
}

// ------------------------------------------------------------------------------------------------
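// Parse a single float token into 'f'. Leading spaces are skipped first;
// if the line already ended, 'f' keeps its previous value, which is how
// optional trailing values are handled throughout this parser.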
#define AI_NFF_PARSE_FLOAT(f) \
    SkipSpaces(&sz); \
    if (!::IsLineEnd(*sz))sz = fast_atoreal_move<float>(sz, (float&)f);

// ------------------------------------------------------------------------------------------------
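// Parse three floats in a row (a position, direction or color) into v[0..2].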
#define AI_NFF_PARSE_TRIPLE(v) \
    AI_NFF_PARSE_FLOAT(v[0]) \
    AI_NFF_PARSE_FLOAT(v[1]) \
    AI_NFF_PARSE_FLOAT(v[2])

// ------------------------------------------------------------------------------------------------
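// Read the center and radii of a shape primitive ('s', 'dod', 'hex', ...).
// Only a single radius value needs to be present; the y/z components are
// initialized to qnan and fall back to the x radius if they are missing.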
#define AI_NFF_PARSE_SHAPE_INFORMATION() \
    aiVector3D center, radius(1.0f,get_qnan(),get_qnan()); \
    AI_NFF_PARSE_TRIPLE(center); \
    AI_NFF_PARSE_TRIPLE(radius); \
    if (is_qnan(radius.z))radius.z = radius.x; \
    if (is_qnan(radius.y))radius.y = radius.x; \
    currentMesh.radius = radius; \
    currentMesh.center = center;

// ------------------------------------------------------------------------------------------------
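// Advance to the next non-empty line of an NFF2 file and leave 'sz'
// pointing at its first non-space character. On unexpected EOF a warning
// is logged and the macro bails out.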
#define AI_NFF2_GET_NEXT_TOKEN() \
    do \
    { \
        if (!GetNextLine(buffer,line)) \
            {DefaultLogger::get()->warn("NFF2: Unexpected EOF, can't read next token");break;} \
        SkipSpaces(line,&sz); \
    } \
    while(IsLineEnd(*sz))


// ------------------------------------------------------------------------------------------------
// Loads the material table for the NFF2 file format from an external file
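// The layout sketched below is inferred from the parser in this function,
// not from an official specification:
//
//   mat
//   version <x.xx>
//   matdef <name>
//       valid ...
//       ambient   r g b
//       diffuse   r g b      (or ambientdiffuse r g b)
//       specular  r g b
//       emission  r g b
//       shininess s
//       opacity   o
//   matdef <name> ...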
void NFFImporter::LoadNFF2MaterialTable(std::vector<ShadingInfo>& output,
    const std::string& path, IOSystem* pIOHandler)
{
    std::unique_ptr<IOStream> file( pIOHandler->Open( path, "rb"));

    // Check whether we can read from the file
    if( !file.get()) {
        DefaultLogger::get()->error("NFF2: Unable to open material library " + path + ".");
        return;
    }

    // get the size of the file
    const unsigned int m = (unsigned int)file->FileSize();

    // allocate storage and copy the contents of the file to a memory buffer
    // (terminate it with zero)
    std::vector<char> mBuffer2(m+1);
    TextFileToBuffer(file.get(),mBuffer2);
    const char* buffer = &mBuffer2[0];

    // First of all: remove all comments from the file
    CommentRemover::RemoveLineComments("//",&mBuffer2[0]);

    // The file should start with the magic sequence "mat"
    if (!TokenMatch(buffer,"mat",3)) {
        DefaultLogger::get()->error("NFF2: Not a valid material library " + path + ".");
        return;
    }

    ShadingInfo* curShader = NULL;

    // Now read the file line by line
    char line[4096];
    const char* sz;
    while (GetNextLine(buffer,line))
    {
        SkipSpaces(line,&sz);

        // 'version' defines the version of the file format
        if (TokenMatch(sz,"version",7))
        {
            DefaultLogger::get()->info("NFF (Sense8) material library file format: " + std::string(sz));
        }
        // 'matdef' starts a new material in the file
        else if (TokenMatch(sz,"matdef",6))
        {
            // add a new material to the list
            output.push_back( ShadingInfo() );
            curShader = & output.back();

            // parse the name of the material
        }
        else if (!TokenMatch(sz,"valid",5))
        {
            // check whether we have an active material at the moment
            if (!IsLineEnd(*sz))
            {
                if (!curShader)
                {
                    DefaultLogger::get()->error(std::string("NFF2 material library: Found element ") +
                        sz + " but there is no active material");
                    continue;
                }
            }
            else continue;

            // now read the material property and determine its type
            aiColor3D c;
            if (TokenMatch(sz,"ambient",7))
            {
                AI_NFF_PARSE_TRIPLE(c);
                curShader->ambient = c;
            }
            else if (TokenMatch(sz,"diffuse",7) || TokenMatch(sz,"ambientdiffuse",14) /* correct? */)
            {
                AI_NFF_PARSE_TRIPLE(c);
                curShader->diffuse = curShader->ambient = c;
            }
            else if (TokenMatch(sz,"specular",8))
            {
                AI_NFF_PARSE_TRIPLE(c);
                curShader->specular = c;
            }
            else if (TokenMatch(sz,"emission",8))
            {
                AI_NFF_PARSE_TRIPLE(c);
                curShader->emissive = c;
            }
            else if (TokenMatch(sz,"shininess",9))
            {
                AI_NFF_PARSE_FLOAT(curShader->shininess);
            }
            else if (TokenMatch(sz,"opacity",7))
            {
                AI_NFF_PARSE_FLOAT(curShader->opacity);
            }
        }
    }
}

// ------------------------------------------------------------------------------------------------
// Imports the given file into the given scene structure.
void NFFImporter::InternReadFile( const std::string& pFile,
    aiScene* pScene, IOSystem* pIOHandler)
{
    std::unique_ptr<IOStream> file( pIOHandler->Open( pFile, "rb"));

    // Check whether we can read from the file
    if( !file.get())
        throw DeadlyImportError( "Failed to open NFF file " + pFile + ".");

    // allocate storage and copy the contents of the file to a memory buffer
    // (terminate it with zero)
    std::vector<char> mBuffer2;
    TextFileToBuffer(file.get(),mBuffer2);
    const char* buffer = &mBuffer2[0];

    // mesh arrays - separate here to make the handling of the pointers below easier.
    std::vector<MeshInfo> meshes;
    std::vector<MeshInfo> meshesWithNormals;
    std::vector<MeshInfo> meshesWithUVCoords;
    std::vector<MeshInfo> meshesLocked;

    char line[4096];
    const char* sz;

    // camera parameters
    aiVector3D camPos, camUp(0.f,1.f,0.f), camLookAt(0.f,0.f,1.f);
    float angle = 45.f;
    aiVector2D resolution;

    bool hasCam = false;

    MeshInfo* currentMeshWithNormals = NULL;
    MeshInfo* currentMesh = NULL;
    MeshInfo* currentMeshWithUVCoords = NULL;

    ShadingInfo s; // current material info

    // degree of tessellation
    unsigned int iTesselation = 4;

    // some temporary variables we need to parse the file
    unsigned int sphere = 0,
        cylinder = 0,
        cone = 0,
        numNamed = 0,
        dodecahedron = 0,
        octahedron = 0,
        tetrahedron = 0,
        hexahedron = 0;

    // lights imported from the file
    std::vector<Light> lights;

    // check whether this is the NFF2 file format
    if (TokenMatch(buffer,"nff",3))
    {
        const float qnan = get_qnan();
        const aiColor4D cQNAN = aiColor4D (qnan,0.f,0.f,1.f);
        const aiVector3D vQNAN = aiVector3D(qnan,0.f,0.f);

        // This is the NFF2 (Sense8) flavour of the format - only a bare-bones
        // parser is implemented here; the more obscure features are not
        // supported as they hardly seem worth the effort.
        // http://ozviz.wasp.uwa.edu.au/~pbourke/dataformats/nff/nff2.html
        // http://www.netghost.narod.ru/gff/graphics/summary/sense8.htm
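        // The per-object layout handled by the loop below is roughly
        // (inferred from this parser, see the links above for details):
        //
        //   <object name>
        //   [mtable <material library file>]
        //   <vertex count>
        //   x y z [0xRRGGBB] [norm nx ny nz] [uv u v]        one line per vertex
        //   <face count>
        //   <index count> i0 i1 ... [0xRRGGBB] [matid n] [both] [_<tex attrs>_<file>]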

        // First of all: remove all comments from the file
        CommentRemover::RemoveLineComments("//",&mBuffer2[0]);

        while (GetNextLine(buffer,line))
        {
            SkipSpaces(line,&sz);
            if (TokenMatch(sz,"version",7))
            {
                DefaultLogger::get()->info("NFF (Sense8) file format: " + std::string(sz));
            }
            else if (TokenMatch(sz,"viewpos",7))
            {
                AI_NFF_PARSE_TRIPLE(camPos);
                hasCam = true;
            }
            else if (TokenMatch(sz,"viewdir",7))
            {
                AI_NFF_PARSE_TRIPLE(camLookAt);
                hasCam = true;
            }
            // This starts a new object section
            else if (!IsSpaceOrNewLine(*sz))
            {
                unsigned int subMeshIdx = 0;

                // read the name of the object, skip all spaces
                // at the end of it.
                const char* sz3 = sz;
                while (!IsSpaceOrNewLine(*sz))++sz;
                std::string objectName = std::string(sz3,(unsigned int)(sz-sz3));

                const unsigned int objStart = (unsigned int)meshes.size();

                // There could be a material table in a separate file
                std::vector<ShadingInfo> materialTable;
                while (true)
                {
                    AI_NFF2_GET_NEXT_TOKEN();

                    // material table - an external file
                    if (TokenMatch(sz,"mtable",6))
                    {
                        SkipSpaces(&sz);
                        sz3 = sz;
                        while (!IsSpaceOrNewLine(*sz))++sz;
                        const unsigned int diff = (unsigned int)(sz-sz3);
                        if (!diff)DefaultLogger::get()->warn("NFF2: Found empty mtable token");
                        else
                        {
                            // The material table has the file extension .mat.
                            // If it is not there, we need to append it
                            std::string path = std::string(sz3,diff);
                            if (path.length() < 4 || path.substr(path.length()-4) != ".mat")
                            {
                                path.append(".mat");
                            }

                            // Now extract the working directory from the path to
                            // this file and append the material library filename
                            // to it.
                            std::string::size_type s;
                            if ((std::string::npos == (s = path.find_last_of('\\')) || !s) &&
                                (std::string::npos == (s = path.find_last_of('/')) || !s) )
                            {
                                s = pFile.find_last_of('\\');
                                if (std::string::npos == s)s = pFile.find_last_of('/');
                                if (std::string::npos != s)
                                {
                                    path = pFile.substr(0,s+1) + path;
                                }
                            }
                            LoadNFF2MaterialTable(materialTable,path,pIOHandler);
                        }
                    }
                    else break;
                }

                // read the number of vertices
                unsigned int num = ::strtoul10(sz,&sz);

                // temporary storage
                std::vector<aiColor4D> tempColors;
                std::vector<aiVector3D> tempPositions,tempTextureCoords,tempNormals;

                bool hasNormals = false,hasUVs = false,hasColor = false;

                tempPositions.reserve (num);
                tempColors.reserve (num);
                tempNormals.reserve (num);
                tempTextureCoords.reserve (num);
                for (unsigned int i = 0; i < num; ++i)
                {
                    AI_NFF2_GET_NEXT_TOKEN();
                    aiVector3D v;
                    AI_NFF_PARSE_TRIPLE(v);
                    tempPositions.push_back(v);

                    // parse all other attributes in the line
                    while (true)
                    {
                        SkipSpaces(&sz);
                        if (IsLineEnd(*sz))break;

                        // color definition
                        if (TokenMatch(sz,"0x",2))
                        {
                            hasColor = true;
                            unsigned int numIdx = ::strtoul16(sz,&sz);
                            aiColor4D clr;
                            clr.a = 1.f;

                            // 0xRRGGBB
                            clr.r = ((numIdx >> 16u) & 0xff) / 255.f;
                            clr.g = ((numIdx >> 8u) & 0xff) / 255.f;
                            clr.b = ((numIdx) & 0xff) / 255.f;
                            tempColors.push_back(clr);
                        }
                        // normal vector
                        else if (TokenMatch(sz,"norm",4))
                        {
                            hasNormals = true;
                            AI_NFF_PARSE_TRIPLE(v);
                            tempNormals.push_back(v);
                        }
                        // UV coordinate
                        else if (TokenMatch(sz,"uv",2))
                        {
                            hasUVs = true;
                            AI_NFF_PARSE_FLOAT(v.x);
                            AI_NFF_PARSE_FLOAT(v.y);
                            v.z = 0.f;
                            tempTextureCoords.push_back(v);
                        }
                    }

                    // fill in dummies for all attributes that have not been set
                    if (tempNormals.size() != tempPositions.size())
                        tempNormals.push_back(vQNAN);

                    if (tempTextureCoords.size() != tempPositions.size())
                        tempTextureCoords.push_back(vQNAN);

                    if (tempColors.size() != tempPositions.size())
                        tempColors.push_back(cQNAN);
                }

                AI_NFF2_GET_NEXT_TOKEN();
                if (!num)throw DeadlyImportError("NFF2: There are zero vertices");
                num = ::strtoul10(sz,&sz);

                std::vector<unsigned int> tempIdx;
                tempIdx.reserve(10);
                for (unsigned int i = 0; i < num; ++i)
                {
                    AI_NFF2_GET_NEXT_TOKEN();
                    SkipSpaces(line,&sz);
                    unsigned int numIdx = ::strtoul10(sz,&sz);

                    // read all face indices of the polygon
                    if (numIdx)
                    {
                        // mesh.faces.push_back(numIdx);
                        // tempIdx.erase(tempIdx.begin(),tempIdx.end());
                        tempIdx.resize(numIdx);

                        for (unsigned int a = 0; a < numIdx;++a)
                        {
                            SkipSpaces(sz,&sz);
                            unsigned int m = ::strtoul10(sz,&sz);
                            if (m >= (unsigned int)tempPositions.size())
                            {
                                DefaultLogger::get()->error("NFF2: Vertex index overflow");
                                m = 0;
                            }
                            // mesh.vertices.push_back (tempPositions[idx]);
                            tempIdx[a] = m;
                        }
                    }

                    // build a temporary shader object for the face.
                    ShadingInfo shader;
                    unsigned int matIdx = 0;

                    // white material color - we have vertex colors
                    shader.color = aiColor3D(1.f,1.f,1.f);
                    aiColor4D c = aiColor4D(1.f,1.f,1.f,1.f);
                    while (true)
                    {
                        SkipSpaces(sz,&sz);
                        if(IsLineEnd(*sz))break;

                        // per-polygon colors
                        if (TokenMatch(sz,"0x",2))
                        {
                            hasColor = true;
                            const char* sz2 = sz;
                            numIdx = ::strtoul16(sz,&sz);
                            const unsigned int diff = (unsigned int)(sz-sz2);

                            // 0xRRGGBB
                            if (diff > 3)
                            {
                                c.r = ((numIdx >> 16u) & 0xff) / 255.f;
                                c.g = ((numIdx >> 8u) & 0xff) / 255.f;
                                c.b = ((numIdx) & 0xff) / 255.f;
                            }
                            // 0xRGB
                            else
                            {
                                c.r = ((numIdx >> 8u) & 0xf) / 16.f;
                                c.g = ((numIdx >> 4u) & 0xf) / 16.f;
                                c.b = ((numIdx) & 0xf) / 16.f;
                            }
                        }
                        // TODO - implement texture mapping here
#if 0
                        // mirror vertex texture coordinate?
                        else if (TokenMatch(sz,"mirror",6))
                        {
                        }
                        // texture coordinate scaling
                        else if (TokenMatch(sz,"scale",5))
                        {
                        }
                        // texture coordinate translation
                        else if (TokenMatch(sz,"trans",5))
                        {
                        }
                        // texture coordinate rotation angle
                        else if (TokenMatch(sz,"rot",3))
                        {
                        }
#endif

                        // texture file name for this polygon + mapping information
                        else if ('_' == sz[0])
                        {
                            // get mapping information
                            switch (sz[1])
                            {
                            case 'v':
                            case 'V':

                                shader.shaded = false;
                                break;

                            case 't':
                            case 'T':
                            case 'u':
                            case 'U':

                                DefaultLogger::get()->warn("Unsupported NFF2 texture attribute: trans");
                            };
                            if (!sz[1] || '_' != sz[2])
                            {
                                DefaultLogger::get()->warn("NFF2: Expected underscore after texture attributes");
                                continue;
                            }
                            const char* sz2 = sz+3;
                            while (!IsSpaceOrNewLine( *sz ))++sz;
                            const unsigned int diff = (unsigned int)(sz-sz2);
                            if (diff)shader.texFile = std::string(sz2,diff);
                        }

                        // Two-sided material?
                        else if (TokenMatch(sz,"both",4))
                        {
                            shader.twoSided = true;
                        }

                        // Material ID?
                        else if (!materialTable.empty() && TokenMatch(sz,"matid",5))
                        {
                            SkipSpaces(&sz);
                            matIdx = ::strtoul10(sz,&sz);
                            if (matIdx >= materialTable.size())
                            {
                                DefaultLogger::get()->error("NFF2: Material index overflow.");
                                matIdx = 0;
                            }

                            // now combine our current shader with the shader we
                            // read from the material table.
                            ShadingInfo& mat = materialTable[matIdx];
                            shader.ambient = mat.ambient;
                            shader.diffuse = mat.diffuse;
                            shader.emissive = mat.emissive;
                            shader.opacity = mat.opacity;
                            shader.specular = mat.specular;
                            shader.shininess = mat.shininess;
                        }
                        else SkipToken(sz);
                    }

                    // search all shaders we already have for this object for an
                    // identical one; if we find one, append our mesh data to it.
                    MeshInfo* mesh = NULL;
                    for (std::vector<MeshInfo>::iterator it = meshes.begin() + objStart, end = meshes.end();
                        it != end; ++it)
                    {
                        if ((*it).shader == shader && (*it).matIndex == matIdx)
                        {
                            // we have one, we can append our data to it
                            mesh = &(*it);
                        }
                    }
                    if (!mesh)
                    {
                        meshes.push_back(MeshInfo(PatchType_Simple,false));
                        mesh = &meshes.back();
                        mesh->matIndex = matIdx;

                        // We need to add a new mesh to the list. We assign
                        // a unique name to it to make sure the scene passes
                        // the validation step for the moment.
                        // TODO: fix naming of objects in the scenegraph later
                        if (objectName.length())
                        {
                            ::strcpy(mesh->name,objectName.c_str());
                            ASSIMP_itoa10(&mesh->name[objectName.length()],30,subMeshIdx++);
                        }

                        // copy the shader to the mesh.
                        mesh->shader = shader;
                    }

                    // fill the mesh with data
                    if (!tempIdx.empty())
                    {
                        mesh->faces.push_back((unsigned int)tempIdx.size());
                        for (std::vector<unsigned int>::const_iterator it = tempIdx.begin(), end = tempIdx.end();
                            it != end;++it)
                        {
                            unsigned int m = *it;

                            // copy colors - vertex color specifications override polygon color specifications
                            if (hasColor)
                            {
                                const aiColor4D& clr = tempColors[m];
                                mesh->colors.push_back((is_qnan( clr.r ) ? c : clr));
                            }

                            // positions should always be there
                            mesh->vertices.push_back (tempPositions[m]);

                            // copy normal vectors
                            if (hasNormals)
                                mesh->normals.push_back (tempNormals[m]);

                            // copy texture coordinates
                            if (hasUVs)
                                mesh->uvs.push_back (tempTextureCoords[m]);
                        }
                    }
                }
                if (!num)throw DeadlyImportError("NFF2: There are zero faces");
            }
        }
        camLookAt = camLookAt + camPos;
    }
    else // the "normal" Neutral File Format, which is much more common
    {
        while (GetNextLine(buffer,line))
        {
            sz = line;
            if ('p' == line[0] || TokenMatch(sz,"tpp",3))
            {
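                // Polygon primitives: 'p'   - positions only,
                //                     'pp'  - positions + per-vertex normals,
                //                     'tpp' - positions + normals + texture coordinates.
                // Faces that share the current shading setup are merged into one MeshInfo.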
                MeshInfo* out = NULL;

                // 'tpp' - texture polygon patch primitive
                if ('t' == line[0])
                {
                    currentMeshWithUVCoords = NULL;
                    for (auto &mesh : meshesWithUVCoords)
                    {
                        if (mesh.shader == s)
                        {
                            currentMeshWithUVCoords = &mesh;
                            break;
                        }
                    }

                    if (!currentMeshWithUVCoords)
                    {
                        meshesWithUVCoords.push_back(MeshInfo(PatchType_UVAndNormals));
                        currentMeshWithUVCoords = &meshesWithUVCoords.back();
                        currentMeshWithUVCoords->shader = s;
                    }
                    out = currentMeshWithUVCoords;
                }
                // 'pp' - polygon patch primitive
                else if ('p' == line[1])
                {
                    currentMeshWithNormals = NULL;
                    for (auto &mesh : meshesWithNormals)
                    {
                        if (mesh.shader == s)
                        {
                            currentMeshWithNormals = &mesh;
                            break;
                        }
                    }

                    if (!currentMeshWithNormals)
                    {
                        meshesWithNormals.push_back(MeshInfo(PatchType_Normals));
                        currentMeshWithNormals = &meshesWithNormals.back();
                        currentMeshWithNormals->shader = s;
                    }
                    sz = &line[2];out = currentMeshWithNormals;
                }
                // 'p' - polygon primitive
                else
                {
                    currentMesh = NULL;
                    for (auto &mesh : meshes)
                    {
                        if (mesh.shader == s)
                        {
                            currentMesh = &mesh;
                            break;
                        }
                    }

                    if (!currentMesh)
                    {
                        meshes.push_back(MeshInfo(PatchType_Simple));
                        currentMesh = &meshes.back();
                        currentMesh->shader = s;
                    }
                    sz = &line[1];out = currentMesh;
                }
                SkipSpaces(sz,&sz);
                unsigned int m = strtoul10(sz);

                // ---- flip the face order
                out->vertices.resize(out->vertices.size()+m);
                if (out != currentMesh)
                {
                    out->normals.resize(out->vertices.size());
                }
                if (out == currentMeshWithUVCoords)
                {
                    out->uvs.resize(out->vertices.size());
                }
                for (unsigned int n = 0; n < m;++n)
                {
                    if(!GetNextLine(buffer,line))
                    {
                        DefaultLogger::get()->error("NFF: Unexpected EOF was encountered. Patch definition incomplete");
                        continue;
                    }

                    aiVector3D v; sz = &line[0];
                    AI_NFF_PARSE_TRIPLE(v);
                    out->vertices[out->vertices.size()-n-1] = v;

                    if (out != currentMesh)
                    {
                        AI_NFF_PARSE_TRIPLE(v);
                        out->normals[out->vertices.size()-n-1] = v;
                    }
                    if (out == currentMeshWithUVCoords)
                    {
                        // FIX: in one test file this wraps over multiple lines
                        SkipSpaces(&sz);
                        if (IsLineEnd(*sz))
                        {
                            GetNextLine(buffer,line);
                            sz = line;
                        }
                        AI_NFF_PARSE_FLOAT(v.x);
                        SkipSpaces(&sz);
                        if (IsLineEnd(*sz))
                        {
                            GetNextLine(buffer,line);
                            sz = line;
                        }
                        AI_NFF_PARSE_FLOAT(v.y);
                        v.y = 1.f - v.y;
                        out->uvs[out->vertices.size()-n-1] = v;
                    }
                }
                out->faces.push_back(m);
            }
            // 'f' - shading information block
            else if (TokenMatch(sz,"f",1))
            {
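                // Classic NFF fill color / shading line, as consumed below:
                //   f  red green blue  Kd  Ks  shine  T  index_of_refraction
                // Kd and Ks are scalar factors; shine and T (transmittance)
                // are read but discarded.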
                float d;

                // read the RGB colors
                AI_NFF_PARSE_TRIPLE(s.color);

                // read the other properties
                AI_NFF_PARSE_FLOAT(s.diffuse.r);
                AI_NFF_PARSE_FLOAT(s.specular.r);
                AI_NFF_PARSE_FLOAT(d); // skip shininess and transmittance
                AI_NFF_PARSE_FLOAT(d);
                AI_NFF_PARSE_FLOAT(s.refracti);

                // NFF stores the diffuse and specular terms as simple scalar
                // factors (NFF2 uses full colors instead), so replicate the
                // scalar to all three channels
                s.diffuse.g = s.diffuse.b = s.diffuse.r;
                s.specular.g = s.specular.b = s.specular.r;

                // if the next token is NOT a number, we assume it is a texture
                // file name; some NFF files found on the internet use this
                // extension, so it is supported here
                SkipSpaces(&sz);
                if (!IsNumeric(*sz))
                {
                    // TODO: Support full file names with spaces and quotation marks ...
                    const char* p = sz;
                    while (!IsSpaceOrNewLine( *sz ))++sz;

                    unsigned int diff = (unsigned int)(sz-p);
                    if (diff)
                    {
                        s.texFile = std::string(p,diff);
                    }
                }
                else
                {
                    AI_NFF_PARSE_FLOAT(s.ambient); // optional
                }
            }
            // 'shader' - other way to specify a texture
            else if (TokenMatch(sz,"shader",6))
            {
                SkipSpaces(&sz);
                const char* old = sz;
                while (!IsSpaceOrNewLine(*sz))++sz;
                s.texFile = std::string(old, (uintptr_t)sz - (uintptr_t)old);
            }
            // 'l' - light source
            else if (TokenMatch(sz,"l",1))
            {
                lights.push_back(Light());
                Light& light = lights.back();

                AI_NFF_PARSE_TRIPLE(light.position);
                AI_NFF_PARSE_FLOAT (light.intensity);
                AI_NFF_PARSE_TRIPLE(light.color);
            }
            // 's' - sphere
            else if (TokenMatch(sz,"s",1))
            {
                meshesLocked.push_back(MeshInfo(PatchType_Simple,true));
                MeshInfo& currentMesh = meshesLocked.back();
                currentMesh.shader = s;
                currentMesh.shader.mapping = aiTextureMapping_SPHERE;

                AI_NFF_PARSE_SHAPE_INFORMATION();

                // we don't need scaling or translation here - we do it in the node's transform
                StandardShapes::MakeSphere(iTesselation, currentMesh.vertices);
                currentMesh.faces.resize(currentMesh.vertices.size()/3,3);

                // generate a name for the mesh
                ::ai_snprintf(currentMesh.name,128,"sphere_%i",sphere++);
            }
            // 'dod' - dodecahedron
            else if (TokenMatch(sz,"dod",3))
            {
                meshesLocked.push_back(MeshInfo(PatchType_Simple,true));
                MeshInfo& currentMesh = meshesLocked.back();
                currentMesh.shader = s;
                currentMesh.shader.mapping = aiTextureMapping_SPHERE;

                AI_NFF_PARSE_SHAPE_INFORMATION();

                // we don't need scaling or translation here - we do it in the node's transform
                StandardShapes::MakeDodecahedron(currentMesh.vertices);
                currentMesh.faces.resize(currentMesh.vertices.size()/3,3);

                // generate a name for the mesh
                ::ai_snprintf(currentMesh.name,128,"dodecahedron_%i",dodecahedron++);
            }

            // 'oct' - octahedron
            else if (TokenMatch(sz,"oct",3))
            {
                meshesLocked.push_back(MeshInfo(PatchType_Simple,true));
                MeshInfo& currentMesh = meshesLocked.back();
                currentMesh.shader = s;
                currentMesh.shader.mapping = aiTextureMapping_SPHERE;

                AI_NFF_PARSE_SHAPE_INFORMATION();

                // we don't need scaling or translation here - we do it in the node's transform
                StandardShapes::MakeOctahedron(currentMesh.vertices);
                currentMesh.faces.resize(currentMesh.vertices.size()/3,3);

                // generate a name for the mesh
                ::ai_snprintf(currentMesh.name,128,"octahedron_%i",octahedron++);
            }

            // 'tet' - tetrahedron
            else if (TokenMatch(sz,"tet",3))
            {
                meshesLocked.push_back(MeshInfo(PatchType_Simple,true));
                MeshInfo& currentMesh = meshesLocked.back();
                currentMesh.shader = s;
                currentMesh.shader.mapping = aiTextureMapping_SPHERE;

                AI_NFF_PARSE_SHAPE_INFORMATION();

                // we don't need scaling or translation here - we do it in the node's transform
                StandardShapes::MakeTetrahedron(currentMesh.vertices);
                currentMesh.faces.resize(currentMesh.vertices.size()/3,3);

                // generate a name for the mesh
                ::ai_snprintf(currentMesh.name,128,"tetrahedron_%i",tetrahedron++);
            }

            // 'hex' - hexahedron
            else if (TokenMatch(sz,"hex",3))
            {
                meshesLocked.push_back(MeshInfo(PatchType_Simple,true));
                MeshInfo& currentMesh = meshesLocked.back();
                currentMesh.shader = s;
                currentMesh.shader.mapping = aiTextureMapping_BOX;

                AI_NFF_PARSE_SHAPE_INFORMATION();

                // we don't need scaling or translation here - we do it in the node's transform
                StandardShapes::MakeHexahedron(currentMesh.vertices);
                currentMesh.faces.resize(currentMesh.vertices.size()/3,3);

                // generate a name for the mesh
                ::ai_snprintf(currentMesh.name,128,"hexahedron_%i",hexahedron++);
            }
            // 'c' - cone
            else if (TokenMatch(sz,"c",1))
            {
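                // Cone/cylinder primitive - two additional lines are expected:
                //   base.x base.y base.z base_radius
                //   apex.x apex.y apex.z apex_radius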
                meshesLocked.push_back(MeshInfo(PatchType_Simple,true));
                MeshInfo& currentMesh = meshesLocked.back();
                currentMesh.shader = s;
                currentMesh.shader.mapping = aiTextureMapping_CYLINDER;

                if(!GetNextLine(buffer,line))
                {
                    DefaultLogger::get()->error("NFF: Unexpected end of file (cone definition not complete)");
                    break;
                }
                sz = line;

                // read the two center points and the respective radii
                aiVector3D center1, center2; float radius1, radius2;
                AI_NFF_PARSE_TRIPLE(center1);
                AI_NFF_PARSE_FLOAT(radius1);

                if(!GetNextLine(buffer,line))
                {
                    DefaultLogger::get()->error("NFF: Unexpected end of file (cone definition not complete)");
                    break;
                }
                sz = line;

                AI_NFF_PARSE_TRIPLE(center2);
                AI_NFF_PARSE_FLOAT(radius2);

                // compute the center point of the cone/cylinder -
                // it is its local transformation origin
                currentMesh.dir = center2-center1;
                currentMesh.center = center1+currentMesh.dir/(ai_real)2.0;

                float f;
                if (( f = currentMesh.dir.Length()) < 10e-3f )
                {
                    DefaultLogger::get()->error("NFF: Cone height is close to zero");
                    continue;
                }
                currentMesh.dir /= f; // normalize

                // generate the cone - it consists of simple triangles
                StandardShapes::MakeCone(f, radius1, radius2,
                    integer_pow(4, iTesselation), currentMesh.vertices);

                // MakeCone() returns tris
                currentMesh.faces.resize(currentMesh.vertices.size()/3,3);

                // generate a name for the mesh: 'cone' if it is a cone,
                // 'cylinder' if it is a cylinder. Funny, isn't it?
                if (radius1 != radius2)
                    ::ai_snprintf(currentMesh.name,128,"cone_%i",cone++);
                else ::ai_snprintf(currentMesh.name,128,"cylinder_%i",cylinder++);
            }
            // 'tess' - tessellation
            else if (TokenMatch(sz,"tess",4))
            {
                SkipSpaces(&sz);
                iTesselation = strtoul10(sz);
            }
            // 'from' - camera position
            else if (TokenMatch(sz,"from",4))
            {
                AI_NFF_PARSE_TRIPLE(camPos);
                hasCam = true;
            }
            // 'at' - camera look-at vector
            else if (TokenMatch(sz,"at",2))
            {
                AI_NFF_PARSE_TRIPLE(camLookAt);
                hasCam = true;
            }
            // 'up' - camera up vector
            else if (TokenMatch(sz,"up",2))
            {
                AI_NFF_PARSE_TRIPLE(camUp);
                hasCam = true;
            }
            // 'angle' - (half?) camera field of view
            else if (TokenMatch(sz,"angle",5))
            {
                AI_NFF_PARSE_FLOAT(angle);
                hasCam = true;
            }
            // 'resolution' - used to compute the screen aspect
            else if (TokenMatch(sz,"resolution",10))
            {
                AI_NFF_PARSE_FLOAT(resolution.x);
                AI_NFF_PARSE_FLOAT(resolution.y);
                hasCam = true;
            }
            // 'pb' - bezier patch. Not supported yet
            else if (TokenMatch(sz,"pb",2))
            {
                DefaultLogger::get()->error("NFF: Encountered unsupported ID: bezier patch");
            }
            // 'pn' - NURBS. Not supported yet
            else if (TokenMatch(sz,"pn",2) || TokenMatch(sz,"pnn",3))
            {
                DefaultLogger::get()->error("NFF: Encountered unsupported ID: NURBS");
            }
            // '#' - comment
            else if ('#' == line[0])
            {
                const char* sz;SkipSpaces(&line[1],&sz);
                if (!IsLineEnd(*sz))DefaultLogger::get()->info(sz);
            }
        }
    }

    // copy all mesh arrays into one large array
    meshes.reserve (meshes.size()+meshesLocked.size()+meshesWithNormals.size()+meshesWithUVCoords.size());
    meshes.insert (meshes.end(),meshesLocked.begin(),meshesLocked.end());
    meshes.insert (meshes.end(),meshesWithNormals.begin(),meshesWithNormals.end());
    meshes.insert (meshes.end(),meshesWithUVCoords.begin(),meshesWithUVCoords.end());

    // now generate output meshes. first find out how many meshes we'll need
    std::vector<MeshInfo>::const_iterator it = meshes.begin(), end = meshes.end();
    for (;it != end;++it)
    {
        if (!(*it).faces.empty())
        {
            ++pScene->mNumMeshes;
            if ((*it).name[0])++numNamed;
        }
    }

    // generate a dummy root node - assign all unnamed elements such
    // as polygons and polygon patches to the root node and generate
    // sub nodes for named objects such as spheres and cones.
    aiNode* const root = new aiNode();
    root->mName.Set("<NFF_Root>");
    root->mNumChildren = numNamed + (hasCam ? 1 : 0) + (unsigned int) lights.size();
    root->mNumMeshes = pScene->mNumMeshes-numNamed;

    aiNode** ppcChildren = NULL;
    unsigned int* pMeshes = NULL;
    if (root->mNumMeshes)
        pMeshes = root->mMeshes = new unsigned int[root->mNumMeshes];
    if (root->mNumChildren)
        ppcChildren = root->mChildren = new aiNode*[root->mNumChildren];

    // generate the camera
    if (hasCam)
    {
        ai_assert(ppcChildren);
        aiNode* nd = new aiNode();
        *ppcChildren = nd;
        nd->mName.Set("<NFF_Camera>");
        nd->mParent = root;

        // allocate the camera in the scene
        pScene->mNumCameras = 1;
        pScene->mCameras = new aiCamera*[1];
        aiCamera* c = pScene->mCameras[0] = new aiCamera;

        c->mName = nd->mName; // make sure the names are identical
        c->mHorizontalFOV = AI_DEG_TO_RAD( angle );
        c->mLookAt = camLookAt - camPos;
        c->mPosition = camPos;
        c->mUp = camUp;

        // If the resolution is not specified in the file, leave the aspect
        // ratio at 0 (undefined) so that consumers fall back to their default.
        c->mAspect = (!resolution.y ? 0.f : resolution.x / resolution.y);
        ++ppcChildren;
    }

    // generate light sources
    if (!lights.empty())
    {
        ai_assert(ppcChildren);
        pScene->mNumLights = (unsigned int)lights.size();
        pScene->mLights = new aiLight*[pScene->mNumLights];
        for (unsigned int i = 0; i < pScene->mNumLights;++i,++ppcChildren)
        {
            const Light& l = lights[i];

            aiNode* nd = new aiNode();
            *ppcChildren = nd;
            nd->mParent = root;

            nd->mName.length = ::ai_snprintf(nd->mName.data,1024,"<NFF_Light%u>",i);

            // allocate the light in the scene data structure
            aiLight* out = pScene->mLights[i] = new aiLight();
            out->mName = nd->mName; // make sure the names are identical
            out->mType = aiLightSource_POINT;
            out->mColorDiffuse = out->mColorSpecular = l.color * l.intensity;
            out->mPosition = l.position;
        }
    }

    if (!pScene->mNumMeshes)throw DeadlyImportError("NFF: No meshes loaded");
    pScene->mMeshes = new aiMesh*[pScene->mNumMeshes];
    pScene->mMaterials = new aiMaterial*[pScene->mNumMaterials = pScene->mNumMeshes];
    unsigned int m = 0;
    for (it = meshes.begin(); it != end;++it)
    {
        if ((*it).faces.empty())continue;

        const MeshInfo& src = *it;
        aiMesh* const mesh = pScene->mMeshes[m] = new aiMesh();
        mesh->mNumVertices = (unsigned int)src.vertices.size();
        mesh->mNumFaces = (unsigned int)src.faces.size();

        // Generate sub nodes for named meshes
        if ( src.name[ 0 ] && NULL != ppcChildren ) {
            aiNode* const node = *ppcChildren = new aiNode();
            node->mParent = root;
            node->mNumMeshes = 1;
            node->mMeshes = new unsigned int[1];
            node->mMeshes[0] = m;
            node->mName.Set(src.name);

            // setup the transformation matrix of the node
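            // (rotate the unit Y axis onto the primitive's axis, then scale
            // the matrix columns by the per-axis radii and translate to the
            // center point computed while parsing)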
            aiMatrix4x4::FromToMatrix(aiVector3D(0.f,1.f,0.f),
                src.dir,node->mTransformation);

            aiMatrix4x4& mat = node->mTransformation;
            mat.a1 *= src.radius.x; mat.b1 *= src.radius.x; mat.c1 *= src.radius.x;
            mat.a2 *= src.radius.y; mat.b2 *= src.radius.y; mat.c2 *= src.radius.y;
            mat.a3 *= src.radius.z; mat.b3 *= src.radius.z; mat.c3 *= src.radius.z;
            mat.a4 = src.center.x;
            mat.b4 = src.center.y;
            mat.c4 = src.center.z;

            ++ppcChildren;
        } else {
            *pMeshes++ = m;
        }

        // copy vertex positions
        mesh->mVertices = new aiVector3D[mesh->mNumVertices];
        ::memcpy(mesh->mVertices,&src.vertices[0],
            sizeof(aiVector3D)*mesh->mNumVertices);

        // NFF2: there could be vertex colors
        if (!src.colors.empty())
        {
            ai_assert(src.colors.size() == src.vertices.size());

            // copy vertex colors
            mesh->mColors[0] = new aiColor4D[mesh->mNumVertices];
            ::memcpy(mesh->mColors[0],&src.colors[0],
                sizeof(aiColor4D)*mesh->mNumVertices);
        }

        if (!src.normals.empty())
        {
            ai_assert(src.normals.size() == src.vertices.size());

            // copy normal vectors
            mesh->mNormals = new aiVector3D[mesh->mNumVertices];
            ::memcpy(mesh->mNormals,&src.normals[0],
                sizeof(aiVector3D)*mesh->mNumVertices);
        }

        if (!src.uvs.empty())
        {
            ai_assert(src.uvs.size() == src.vertices.size());

            // copy texture coordinates
            mesh->mTextureCoords[0] = new aiVector3D[mesh->mNumVertices];
            ::memcpy(mesh->mTextureCoords[0],&src.uvs[0],
                sizeof(aiVector3D)*mesh->mNumVertices);
        }

        // generate faces
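        // (vertices were stored per face, i.e. unindexed, so the indices of
        // each face are simply consecutive)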
        unsigned int p = 0;
        aiFace* pFace = mesh->mFaces = new aiFace[mesh->mNumFaces];
        for (std::vector<unsigned int>::const_iterator it2 = src.faces.begin(),
            end2 = src.faces.end();
            it2 != end2;++it2,++pFace)
        {
            pFace->mIndices = new unsigned int [ pFace->mNumIndices = *it2 ];
            for (unsigned int o = 0; o < pFace->mNumIndices;++o)
                pFace->mIndices[o] = p++;
        }

        // generate a material for the mesh
        aiMaterial* pcMat = (aiMaterial*)(pScene->mMaterials[m] = new aiMaterial());

        mesh->mMaterialIndex = m++;

        aiString s;
        s.Set(AI_DEFAULT_MATERIAL_NAME);
        pcMat->AddProperty(&s, AI_MATKEY_NAME);

        // FIX: Ignore diffuse == 0
        aiColor3D c = src.shader.color * (src.shader.diffuse.r ? src.shader.diffuse : aiColor3D(1.f,1.f,1.f));
        pcMat->AddProperty(&c,1,AI_MATKEY_COLOR_DIFFUSE);
        c = src.shader.color * src.shader.specular;
        pcMat->AddProperty(&c,1,AI_MATKEY_COLOR_SPECULAR);

        // NFF2 - default values for NFF
        pcMat->AddProperty(&src.shader.ambient, 1,AI_MATKEY_COLOR_AMBIENT);
        pcMat->AddProperty(&src.shader.emissive,1,AI_MATKEY_COLOR_EMISSIVE);
        pcMat->AddProperty(&src.shader.opacity, 1,AI_MATKEY_OPACITY);

        // setup the first texture layer, if existing
        if (src.shader.texFile.length())
        {
            s.Set(src.shader.texFile);
            pcMat->AddProperty(&s,AI_MATKEY_TEXTURE_DIFFUSE(0));

            if (aiTextureMapping_UV != src.shader.mapping) {

                aiVector3D v(0.f,-1.f,0.f);
                pcMat->AddProperty(&v, 1,AI_MATKEY_TEXMAP_AXIS_DIFFUSE(0));
                pcMat->AddProperty((int*)&src.shader.mapping, 1,AI_MATKEY_MAPPING_DIFFUSE(0));
            }
        }

        // setup the name of the material
        if (src.shader.name.length())
        {
            s.Set(src.shader.name);
            pcMat->AddProperty(&s,AI_MATKEY_NAME);
        }

        // setup some more material properties that are specific to NFF2
        int i;
        if (src.shader.twoSided)
        {
            i = 1;
            pcMat->AddProperty(&i,1,AI_MATKEY_TWOSIDED);
        }
        i = (src.shader.shaded ? aiShadingMode_Gouraud : aiShadingMode_NoShading);
        if (src.shader.shininess)
        {
            i = aiShadingMode_Phong;
            pcMat->AddProperty(&src.shader.shininess,1,AI_MATKEY_SHININESS);
        }
        pcMat->AddProperty(&i,1,AI_MATKEY_SHADING_MODEL);
    }
    pScene->mRootNode = root;
}

#endif // !! ASSIMP_BUILD_NO_NFF_IMPORTER
