/**********************************************************************
tokenst.h - Tokenize and trim strings; Open data files

Copyright (C) 1998-2001 by OpenEye Scientific Software, Inc.
Some portions Copyright (C) 2001-2006 by Geoffrey R. Hutchison

This file is part of the Open Babel project.
For more information, see <http://openbabel.sourceforge.net/>

This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation version 2 of the License.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
***********************************************************************/

#ifndef OB_TOKENST_H
#define OB_TOKENST_H

#include <openbabel/babelconfig.h>
#include <vector>
#include <string>
#include <fstream>

namespace OpenBabel
{
  // Utility function prototypes
  OBERROR bool tokenize(std::vector<std::string>&, const char *buf, const char *delimstr=" \t\n\r");
  OBERROR bool tokenize(std::vector<std::string>&, std::string&, const char *delimstr=" \t\n\r", int limit=-1);
  // Remove leading and trailing whitespace from a string (docs in tokenst.cpp)
  OBERROR std::string& Trim(std::string& txt);

  // full documentation in tokenst.cpp
  OBERROR std::string OpenDatafile(std::ifstream& fs,
                                   const std::string& filename,
                                   const std::string& envvar = "BABEL_DATADIR");

  // Used by other code for reading files
#ifdef WIN32
#define FILE_SEP_CHAR "\\"
#else
#define FILE_SEP_CHAR "/"
#endif

} //namespace

#endif
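The sketch below (not part of the Open Babel sources) illustrates how these declarations might be used. It assumes the header is installed as <openbabel/tokenst.h> and that the definitions from tokenst.cpp are linked in; the record string and the data-file name "element.txt" are illustrative only, and success of OpenDatafile is checked via the stream state rather than the returned path.

// Minimal usage sketch for the utilities declared above (assumptions noted in the lead-in).
#include <openbabel/tokenst.h>
#include <fstream>
#include <iostream>
#include <string>
#include <vector>

int main()
{
  using namespace OpenBabel;

  // Split a whitespace-delimited record into fields (default delimiters " \t\n\r").
  std::vector<std::string> fields;
  tokenize(fields, "C1=CC=CC=C1  benzene");
  for (std::size_t i = 0; i < fields.size(); ++i)
    std::cout << i << ": " << fields[i] << '\n';

  // Trim removes leading and trailing whitespace in place and returns a reference.
  std::string padded("   atom types \t");
  std::cout << '[' << Trim(padded) << "]\n";

  // OpenDatafile looks for the file in the directory named by BABEL_DATADIR,
  // among other locations; "element.txt" is only an example file name.
  std::ifstream ifs;
  std::string path = OpenDatafile(ifs, "element.txt");
  if (ifs)
    std::cout << "opened " << path << '\n';

  return 0;
}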
This file is part of the documentation for Open Babel, version 2.2.0.