diff --git a/ton-test-liteclient-full/lite-client-with-webserver/CMake/AddCXXCompilerFlag.cmake b/ton-test-liteclient-full/lite-client-with-webserver/CMake/AddCXXCompilerFlag.cmake
new file mode 100644
index 0000000..f316144
--- /dev/null
+++ b/ton-test-liteclient-full/lite-client-with-webserver/CMake/AddCXXCompilerFlag.cmake
@@ -0,0 +1,66 @@
+# - Adds a compiler flag if it is supported by the compiler
+#
+# This function checks that the supplied compiler flag is supported and then
+# adds it to the corresponding compiler flags
+#
+# add_cxx_compiler_flag(<flag> [<variant>])
+#
+# - Example
+#
+# include(AddCXXCompilerFlag)
+# add_cxx_compiler_flag(-Wall)
+# add_cxx_compiler_flag(-no-strict-aliasing RELEASE)
+# Requires CMake 2.6+
+
+if(__add_cxx_compiler_flag)
+  return()
+endif()
+set(__add_cxx_compiler_flag INCLUDED)
+
+include(CheckCXXCompilerFlag)
+
+function(mangle_compiler_flag FLAG OUTPUT)
+  string(TOUPPER "HAVE_CXX_FLAG_${FLAG}" SANITIZED_FLAG)
+  string(REPLACE "+" "X" SANITIZED_FLAG ${SANITIZED_FLAG})
+  string(REGEX REPLACE "[^A-Za-z_0-9]" "_" SANITIZED_FLAG ${SANITIZED_FLAG})
+  string(REGEX REPLACE "_+" "_" SANITIZED_FLAG ${SANITIZED_FLAG})
+  set(${OUTPUT} "${SANITIZED_FLAG}" PARENT_SCOPE)
+endfunction(mangle_compiler_flag)
+
+function(add_cxx_compiler_flag FLAG)
+  string(REPLACE "-Wno-" "-W" MAIN_FLAG ${FLAG})
+  mangle_compiler_flag("${MAIN_FLAG}" MANGLED_FLAG)
+  set(OLD_CMAKE_REQUIRED_FLAGS "${CMAKE_REQUIRED_FLAGS}")
+  set(CMAKE_REQUIRED_FLAGS "${CMAKE_REQUIRED_FLAGS} ${FLAG}")
+  check_cxx_compiler_flag("${MAIN_FLAG}" ${MANGLED_FLAG})
+  set(CMAKE_REQUIRED_FLAGS "${OLD_CMAKE_REQUIRED_FLAGS}")
+  if(${MANGLED_FLAG})
+    set(VARIANT ${ARGV1})
+    if(ARGV1)
+      string(TOUPPER "_${VARIANT}" VARIANT)
+    endif()
+    set(CMAKE_CXX_FLAGS${VARIANT} "${CMAKE_CXX_FLAGS${VARIANT}} ${FLAG}" PARENT_SCOPE)
+  endif()
+endfunction()
+
+function(add_required_cxx_compiler_flag FLAG)
+  string(REPLACE "-Wno-" "-W" MAIN_FLAG ${FLAG})
+  mangle_compiler_flag("${MAIN_FLAG}" MANGLED_FLAG)
+  set(OLD_CMAKE_REQUIRED_FLAGS "${CMAKE_REQUIRED_FLAGS}")
+  set(CMAKE_REQUIRED_FLAGS "${CMAKE_REQUIRED_FLAGS} ${FLAG}")
+  check_cxx_compiler_flag("${MAIN_FLAG}" ${MANGLED_FLAG})
+  set(CMAKE_REQUIRED_FLAGS "${OLD_CMAKE_REQUIRED_FLAGS}")
+  if(${MANGLED_FLAG})
+    set(VARIANT ${ARGV1})
+    if(ARGV1)
+      string(TOUPPER "_${VARIANT}" VARIANT)
+    endif()
+    set(CMAKE_CXX_FLAGS${VARIANT} "${CMAKE_CXX_FLAGS${VARIANT}} ${FLAG}" PARENT_SCOPE)
+    set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} ${FLAG}" PARENT_SCOPE)
+    set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} ${FLAG}" PARENT_SCOPE)
+    set(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} ${FLAG}" PARENT_SCOPE)
+    set(CMAKE_REQUIRED_FLAGS "${CMAKE_REQUIRED_FLAGS} ${FLAG}" PARENT_SCOPE)
+  else()
+    message(FATAL_ERROR "Required flag '${FLAG}' is not supported by the compiler")
+  endif()
+endfunction()
diff --git a/ton-test-liteclient-full/lite-client-with-webserver/CMake/FindMHD.cmake b/ton-test-liteclient-full/lite-client-with-webserver/CMake/FindMHD.cmake
new file mode 100644
index 0000000..56f5668
--- /dev/null
+++ b/ton-test-liteclient-full/lite-client-with-webserver/CMake/FindMHD.cmake
@@ -0,0 +1,38 @@
+# - Try to find MHD
+# Once done this will define
+#
+# MHD_FOUND - system has MHD
+# MHD_INCLUDE_DIRS - the MHD include directory
+# MHD_LIBRARY - Link these to use MHD
+
+find_path(
+  MHD_INCLUDE_DIR
+  NAMES microhttpd.h
+  DOC "microhttpd include dir"
+)
+
+find_library(
+  MHD_LIBRARY
+  NAMES microhttpd microhttpd-10 libmicrohttpd libmicrohttpd-dll
+  DOC "microhttpd library"
+)
+
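+# For illustration, a consuming CMakeLists.txt might use this module along
+# the following lines (the "webserver" target name here is hypothetical):
+#
+#   list(APPEND CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/CMake")
+#   find_package(MHD)
+#   if(MHD_FOUND)
+#     target_include_directories(webserver PRIVATE ${MHD_INCLUDE_DIRS})
+#     target_link_libraries(webserver PRIVATE ${MHD_LIBRARIES})
+#   endif()
+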
+set(MHD_INCLUDE_DIRS ${MHD_INCLUDE_DIR})
+set(MHD_LIBRARIES ${MHD_LIBRARY})
+
+# debug library on windows
+# same naming convention as in qt (appending debug library with d)
+# boost is using the same "hack" as us with "optimized" and "debug"
+# official MHD project actually uses _d suffix
+if ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "MSVC")
+  find_library(
+    MHD_LIBRARY_DEBUG
+    NAMES microhttpd_d microhttpd-10_d libmicrohttpd_d libmicrohttpd-dll_d
+    DOC "mhd debug library"
+  )
+  set(MHD_LIBRARIES optimized ${MHD_LIBRARIES} debug ${MHD_LIBRARY_DEBUG})
+endif()
+
+include(FindPackageHandleStandardArgs)
+find_package_handle_standard_args(mhd DEFAULT_MSG MHD_INCLUDE_DIR MHD_LIBRARY)
+mark_as_advanced(MHD_INCLUDE_DIR MHD_LIBRARY)
diff --git a/ton-test-liteclient-full/lite-client-with-webserver/CMake/FindReadline.cmake b/ton-test-liteclient-full/lite-client-with-webserver/CMake/FindReadline.cmake
new file mode 100644
index 0000000..3b1892c
--- /dev/null
+++ b/ton-test-liteclient-full/lite-client-with-webserver/CMake/FindReadline.cmake
@@ -0,0 +1,25 @@
+if (APPLE)
+  find_path(READLINE_INCLUDE_DIR readline/readline.h /usr/local/opt/readline/include /opt/local/include /opt/include /usr/local/include /usr/include NO_DEFAULT_PATH)
+endif()
+find_path(READLINE_INCLUDE_DIR readline/readline.h)
+
+if (APPLE)
+  find_library(READLINE_LIBRARY readline /usr/local/opt/readline/lib /opt/local/lib /opt/lib /usr/local/lib /usr/lib NO_DEFAULT_PATH)
+endif()
+find_library(READLINE_LIBRARY readline)
+
+if (READLINE_INCLUDE_DIR AND READLINE_LIBRARY AND NOT GNU_READLINE_FOUND)
+  set(CMAKE_REQUIRED_INCLUDES "${READLINE_INCLUDE_DIR}")
+  set(CMAKE_REQUIRED_LIBRARIES "${READLINE_LIBRARY}")
+  include(CheckCXXSourceCompiles)
+  unset(GNU_READLINE_FOUND CACHE)
+  check_cxx_source_compiles("#include <stdio.h>\n#include <readline/readline.h>\nint main() { rl_replace_line(\"\", 0); }" GNU_READLINE_FOUND)
+  if (NOT GNU_READLINE_FOUND)
+    unset(READLINE_INCLUDE_DIR CACHE)
+    unset(READLINE_LIBRARY CACHE)
+  endif()
+endif()
+
+include(FindPackageHandleStandardArgs)
+find_package_handle_standard_args(Readline DEFAULT_MSG READLINE_INCLUDE_DIR READLINE_LIBRARY)
+mark_as_advanced(READLINE_INCLUDE_DIR READLINE_LIBRARY)
diff --git a/ton-test-liteclient-full/lite-client-with-webserver/CMake/UseLATEX.cmake b/ton-test-liteclient-full/lite-client-with-webserver/CMake/UseLATEX.cmake
new file mode 100644
index 0000000..5380522
--- /dev/null
+++ b/ton-test-liteclient-full/lite-client-with-webserver/CMake/UseLATEX.cmake
@@ -0,0 +1,1936 @@
+# File: UseLATEX.cmake
+# CMAKE commands to actually use the LaTeX compiler
+# Version: 2.4.6
+# Author: Kenneth Moreland <kmorel@sandia.gov>
+#
+# Copyright 2004, 2015 Sandia Corporation.
+# Under the terms of Contract DE-AC04-94AL85000, there is a non-exclusive
+# license for use of this work by or on behalf of the U.S. Government.
+#
+# This software is released under the BSD 3-Clause License.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# 1. Redistributions of source code must retain the above copyright notice,
+#    this list of conditions and the following disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright
+#    notice, this list of conditions and the following disclaimer in the
+#    documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its
+#    contributors may be used to endorse or promote products derived from this
+#    software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
+# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
+# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
+# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+#
+# The following function is defined:
+#
+# add_latex_document(<tex_file>
+#                    [BIBFILES <bib_files>]
+#                    [INPUTS <input_tex_files>]
+#                    [IMAGE_DIRS] <image_directories>
+#                    [IMAGES] <image_files>
+#                    [CONFIGURE] <tex_files>
+#                    [DEPENDS] <tex_files>
+#                    [MULTIBIB_NEWCITES] <suffix_list>
+#                    [USE_BIBLATEX]
+#                    [USE_INDEX]
+#                    [INDEX_NAMES <index_names>]
+#                    [USE_GLOSSARY] [USE_NOMENCL]
+#                    [FORCE_PDF] [FORCE_DVI] [FORCE_HTML]
+#                    [TARGET_NAME] <name>
+#                    [EXCLUDE_FROM_ALL]
+#                    [EXCLUDE_FROM_DEFAULTS])
+# Adds targets that compile <tex_file>. The latex output is placed
+# in LATEX_OUTPUT_PATH or CMAKE_CURRENT_BINARY_DIR if the former is
+# not set. The latex program is picky about where files are located,
+# so all input files are copied from the source directory to the
+# output directory. This includes the target tex file, any tex file
+# listed with the INPUTS option, the bibliography files listed with
+# the BIBFILES option, and any .cls, .bst, .clo, .sty, .ist, and .fd
+# files found in the current source directory. Images found in the
+# IMAGE_DIRS directories or listed by IMAGES are also copied to the
+# output directory and converted to an appropriate format if necessary.
+# Any tex files also listed with the CONFIGURE option are also processed
+# with the CMake CONFIGURE_FILE command (with the @ONLY flag). Any file
+# listed in CONFIGURE but not the target tex file or listed with INPUTS
+# has no effect. DEPENDS can be used to specify generated files that are
+# needed to compile the latex target.
+#
+# The following targets are made. The name prefix is based off of the
+# base name of the tex file unless TARGET_NAME is specified. If
+# TARGET_NAME is specified, then that name is used for the targets.
+#
+# name_dvi: Makes <name>.dvi
+# name_pdf: Makes <name>.pdf using pdflatex.
+# name_safepdf: Makes <name>.pdf using ps2pdf. If using the
+#     default program arguments, this will ensure all fonts
+#     are embedded and no lossy compression has been
+#     performed on images.
+# name_ps: Makes <name>.ps
+# name_html: Makes <name>.html
+# name_auxclean: Deletes <name>.aux and other auxiliary files.
+#     This is sometimes necessary if a LaTeX error occurs
+#     and writes a bad aux file. Unlike the regular clean
+#     target, it does not delete other input files, such as
+#     converted images, to save time on the rebuild.
+#
+# Unless the EXCLUDE_FROM_ALL option is given, one of these targets
+# is added to the ALL target and built by default. Which target is
+# determined by the LATEX_DEFAULT_BUILD CMake variable. See the
+# documentation of that variable for more details.
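+#
+# For illustration, a minimal invocation might look like the following
+# (the file and directory names here are hypothetical):
+#
+#   include(UseLATEX)
+#   add_latex_document(manual.tex
+#     INPUTS chapter1.tex chapter2.tex
+#     BIBFILES manual.bib
+#     IMAGE_DIRS images
+#     USE_INDEX
+#     )
+#
+# With the default settings this creates targets such as manual_pdf and
+# manual_dvi, which build manual.pdf and manual.dvi in the output directory.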
+# +# Unless the EXCLUDE_FROM_DEFAULTS option is given, all these targets +# are added as dependencies to targets named dvi, pdf, safepdf, ps, +# html, and auxclean, respectively. +# +# USE_BIBLATEX enables the use of biblatex/biber as an alternative to +# bibtex. Bibtex remains the default if USE_BIBLATEX is not +# specified. +# +# If the argument USE_INDEX is given, then commands to build an index +# are made. If the argument INDEX_NAMES is given, an index file is +# generated for each name in this list. See the LaTeX package multind +# for more information about how to generate multiple indices. +# +# If the argument USE_GLOSSARY is given, then commands to +# build a glossary are made. If the argument MULTIBIB_NEWCITES is +# given, then additional bibtex calls are added to the build to +# support the extra auxiliary files created with the \newcite command +# in the multibib package. +# +# History: +# +# 2.4.6 Fix parse issue with older versions of CMake. +# +# 2.4.5 Fix issues with files and paths containing spaces. +# +# 2.4.4 Improve error reporting message when LaTeX fails. +# +# When LaTeX fails, delete the output file, which is invalid. +# +# Add warnings for "missing characters." These usually mean that a +# non-ASCII character is in the document and will not be printed +# correctly. +# +# 2.4.3 Check for warnings from the natbib package. When using natbib, +# warnings for missing bibliography references look different. So +# far, natbib seems to be quiet unless something is important, so +# look for all natbib warnings. (We can change this later if +# necessary.) +# +# 2.4.2 Fix an issue where new versions of ImageMagick expect the order of +# options in command line execution of magick/convert. (See, for +# example, http://www.imagemagick.org/Usage/basics/#why.) +# +# 2.4.1 Add ability to dump LaTeX log file when using batch mode. Batch +# mode suppresses most output, often including error messages. To +# make sure critical error messages get displayed, show the full log +# on failures. +# +# 2.4.0 Remove "-r 600" from the default PDFTOPS_CONVERTER_FLAGS. The -r flag +# is available from the Poppler version of pdftops, but not the Xpdf +# version. +# +# Fix an issue with the flags for the different programs not being +# properly separated. +# +# Fix an issue on windows where the = character is not allowed for +# ps2pdf arguments. +# +# Change default arguments for latex and pdflatex commands. Makes the +# output more quiet and prints out the file/line where errors occur. +# (Thanks to Nikos Koukis.) +# +# After a LaTeX build, check the log file for warnings that are +# indicative of problems with the build. +# +# Remove support for latex2html. Instead, use the htlatex program. +# This is now part of TeX Live and most other distributions. It also +# behaves much more like the other LaTeX programs. Also fixed some +# nasty issues with the htlatex arguments. +# +# 2.3.2 Declare LaTeX input files as sources for targets so that they show +# up in IDEs like QtCreator. +# +# Fix issue where main tex files in subdirectories were creating +# invalid targets for building HTML. Just disable the HTML targets in +# this case. +# +# 2.3.1 Support use of magick command instead of convert command for +# ImageMagick 7. +# +# 2.3.0 Add USE_BIBLATEX option to support the biblatex package, which +# requires using the program biber as a replacement for bibtex +# (thanks to David Tracey). 
+#
+# 2.2.1 Add STRINGS property to LATEX_DEFAULT_BUILD to make it easier to
+#       select the default build in the CMake GUI.
+#
+# 2.2.0 Add TARGET_NAME option.
+#
+# 2.1.1 Support for finding bmp, ppm, and other image files.
+#
+# 2.1.0 Fix an error where the pdf target and others were defined multiple
+#       times if UseLATEX.cmake was included multiple times.
+#
+#       Added INDEX_NAMES option to support multiple indexes in a single
+#       document from the multind package (thanks to Dan Lipsa).
+#
+# 2.0.0 First major revision of UseLATEX.cmake updates to more recent features
+#       of CMake and some non-backward compatible changes.
+#
+#       Changed all function and macro names to lower case. CMake's identifiers
+#       are case insensitive, but the convention moved from all upper case to
+#       all lower case somewhere around the release of CMake 2. (The original
+#       version of UseLATEX.cmake predates that.)
+#
+#       Remove condition matching in if statements. They are no longer necessary
+#       and are even discouraged (because else clauses get confusing).
+#
+#       Use "new" features available in CMake such as list and argument parsing.
+#
+#       Remove some code that has been deprecated for a while.
+#
+#       Mark variables for compiler and converter executables as advanced to
+#       match the more conventional CMake behavior.
+#
+#       Changed how default builds are specified and add the ability to force
+#       a particular build.
+#
+#       Made the base targets (pdf, dvi, etc.) global. add_latex_document
+#       always mangles its target names and these base targets depend on
+#       the targets with mangled names.
+#
+# 1.10.5 Fix for Window's convert check (thanks to Martin Baute).
+#
+# 1.10.4 Copy font files to binary directory for packages that come with
+#        their own fonts.
+#
+# 1.10.3 Check for Windows version of convert being used instead of
+#        ImageMagick's version (thanks to Martin Baute).
+#
+# 1.10.2 Use htlatex as a fallback when latex2html is not available (thanks
+#        to Tomasz Grzegurzko).
+#
+# 1.10.1 Make convert program mandatory only if actually used (thanks to
+#        Julien Schueller).
+#
+# 1.10.0 Added NO_DEFAULT and DEFAULT_PS options.
+#        Fixed issue with cleaning files for LaTeX documents originating in
+#        a subdirectory.
+#
+# 1.9.6 Fixed problem with LATEX_SMALL_IMAGES.
+#       Strengthened check to make sure the output directory does not contain
+#       the source files.
+#
+# 1.9.5 Add support for image types not directly supported by either latex
+#       or pdflatex. (Thanks to Jorge Gerardo Pena Pastor for SVG support.)
+#
+# 1.9.4 Fix issues with filenames containing multiple periods.
+#
+# 1.9.3 Hide some variables that are now cached but should not show up in
+#       the ccmake list of variables.
+#
+# 1.9.2 Changed MACRO declarations to FUNCTION declarations. The better
+#       FUNCTION scoping will hopefully avoid some common but subtle bugs.
+#       This implicitly increases the minimum CMake version to 2.6 (although
+#       I honestly only test it with the latest 2.8 version).
+#
+#       Since we are updating the minimum CMake version, I'm going to start
+#       using the builtin LIST commands that are now available.
+#
+#       Favor using pdftops from the Poppler package to convert from pdf to
+#       eps. It does a much better job than ImageMagick or ghostscript.
+#
+# 1.9.1 Fixed typo that caused the LATEX_SMALL_IMAGES option to fail to
+#       activate.
+#
+# 1.9.0 Add support for the multibib package (thanks to Antonio LaTorre).
+#
+# 1.8.2 Fix corner case when an argument name was also a variable containing
+#       the text of an argument.
In this case, the CMake IF was matching +# the argument text with the contents of the variable with the same +# argument name. +# +# 1.8.1 Fix problem where ps2pdf was not getting the appropriate arguments. +# +# 1.8.0 Add support for synctex. +# +# 1.7.7 Support calling xindy when making glossaries. +# +# Improved make clean support. +# +# 1.7.6 Add support for the nomencl package (thanks to Myles English). +# +# 1.7.5 Fix issue with bibfiles being copied two different ways, which causes +# Problems with dependencies (thanks to Edwin van Leeuwen). +# +# 1.7.4 Added the DEFAULT_SAFEPDF option (thanks to Raymond Wan). +# +# Added warnings when image directories are not found (and were +# probably not given relative to the source directory). +# +# 1.7.3 Fix some issues with interactions between makeglossaries and bibtex +# (thanks to Mark de Wever). +# +# 1.7.2 Use ps2pdf to convert eps to pdf to get around the problem with +# ImageMagick dropping the bounding box (thanks to Lukasz Lis). +# +# 1.7.1 Fixed some dependency issues. +# +# 1.7.0 Added DEPENDS options (thanks to Theodore Papadopoulo). +# +# 1.6.1 Ported the makeglossaries command to CMake and embedded the port +# into UseLATEX.cmake. +# +# 1.6.0 Allow the use of the makeglossaries command. Thanks to Oystein +# S. Haaland for the patch. +# +# 1.5.0 Allow any type of file in the INPUTS lists, not just tex file +# (suggested by Eric Noulard). As a consequence, the ability to +# specify tex files without the .tex extension is removed. The removed +# function is of dubious value anyway. +# +# When copying input files, skip over any file that exists in the +# binary directory but does not exist in the source directory with the +# assumption that these files were added by some other mechanism. I +# find this useful when creating large documents with multiple +# chapters that I want to build separately (for speed) as I work on +# them. I use the same boilerplate as the starting point for all +# and just copy it with different configurations. This was what the +# separate ADD_LATEX_DOCUMENT method was supposed to originally be for. +# Since its external use is pretty much deprecated, I removed that +# documentation. +# +# 1.4.1 Copy .sty files along with the other class and package files. +# +# 1.4.0 Added a MANGLE_TARGET_NAMES option that will mangle the target names. +# +# Fixed problem with copying bib files that became apparent with +# CMake 2.4. +# +# 1.3.0 Added a LATEX_OUTPUT_PATH variable that allows you or the user to +# specify where the built latex documents to go. This is especially +# handy if you want to do in-source builds. +# +# Removed the ADD_LATEX_IMAGES macro and absorbed the functionality +# into ADD_LATEX_DOCUMENT. The old interface was always kind of +# clunky anyway since you had to specify the image directory in both +# places. It also made supporting LATEX_OUTPUT_PATH problematic. +# +# Added support for jpeg files. +# +# 1.2.0 Changed the configuration options yet again. Removed the NO_CONFIGURE +# Replaced it with a CONFIGURE option that lists input files for which +# configure should be run. +# +# The pdf target no longer depends on the dvi target. This allows you +# to build latex documents that require pdflatex. Also added an option +# to make the pdf target the default one. +# +# 1.1.1 Added the NO_CONFIGURE option. The @ character can be used when +# specifying table column separators. If two or more are used, then +# will incorrectly substitute them. +# +# 1.1.0 Added ability include multiple bib files. 
Added the ability to copy
+#       sub-tex files for multipart tex files.
+#
+# 1.0.0 If both ps and pdf type images exist, just copy the one that
+#       matches the current render mode. Replaced a bunch of STRING
+#       commands with GET_FILENAME_COMPONENT commands that were made to do
+#       the desired function.
+#
+# 0.4.0 First version posted to CMake Wiki.
+#
+
+if(__USE_LATEX_INCLUDED)
+  return()
+endif()
+set(__USE_LATEX_INCLUDED TRUE)
+
+#############################################################################
+# Find the location of myself while originally executing. If you do this
+# inside of a macro, it will record where the macro was invoked.
+#############################################################################
+set(LATEX_USE_LATEX_LOCATION ${CMAKE_CURRENT_LIST_FILE}
+  CACHE INTERNAL "Location of UseLATEX.cmake file." FORCE
+  )
+
+#############################################################################
+# Generic helper functions
+#############################################################################
+
+include(CMakeParseArguments)
+
+function(latex_list_contains var value)
+  set(input_list ${ARGN})
+  list(FIND input_list "${value}" index)
+  if(index GREATER -1)
+    set(${var} TRUE PARENT_SCOPE)
+  else()
+    set(${var} PARENT_SCOPE)
+  endif()
+endfunction(latex_list_contains)
+
+# Match the contents of a file to a regular expression.
+function(latex_file_match variable filename regexp default)
+  # The FILE STRINGS command would be a bit better, but I'm not totally sure
+  # the match will always be to a whole line, and I don't want to break things.
+  file(READ ${filename} file_contents)
+  string(REGEX MATCHALL "${regexp}"
+    match_result ${file_contents}
+    )
+  if(match_result)
+    set(${variable} "${match_result}" PARENT_SCOPE)
+  else()
+    set(${variable} "${default}" PARENT_SCOPE)
+  endif()
+endfunction(latex_file_match)
+
+# A version of GET_FILENAME_COMPONENT that treats extensions after the last
+# period rather than the first. To the best of my knowledge, all filenames
+# typically used by LaTeX, including image files, have small extensions
+# after the last dot.
+function(latex_get_filename_component varname filename type)
+  set(result)
+  if("${type}" STREQUAL "NAME_WE")
+    get_filename_component(name ${filename} NAME)
+    string(REGEX REPLACE "\\.[^.]*\$" "" result "${name}")
+  elseif("${type}" STREQUAL "EXT")
+    get_filename_component(name ${filename} NAME)
+    string(REGEX MATCH "\\.[^.]*\$" result "${name}")
+  else()
+    get_filename_component(result ${filename} ${type})
+  endif()
+  set(${varname} "${result}" PARENT_SCOPE)
+endfunction(latex_get_filename_component)
+
+#############################################################################
+# Functions that perform processing during a LaTeX build.
+#############################################################################
+function(latex_execute_latex)
+  if(NOT LATEX_TARGET)
+    message(SEND_ERROR "Need to define LATEX_TARGET")
+  endif()
+
+  if(NOT LATEX_WORKING_DIRECTORY)
+    message(SEND_ERROR "Need to define LATEX_WORKING_DIRECTORY")
+  endif()
+
+  if(NOT LATEX_FULL_COMMAND)
+    message(SEND_ERROR "Need to define LATEX_FULL_COMMAND")
+  endif()
+
+  if(NOT LATEX_OUTPUT_FILE)
+    message(SEND_ERROR "Need to define LATEX_OUTPUT_FILE")
+  endif()
+
+  set(full_command_original "${LATEX_FULL_COMMAND}")
+
+  # Choose the native method for parsing command arguments. Newer versions of
+  # CMake allow you to just use NATIVE_COMMAND.
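+  # For example (an illustrative command string, not one from this project),
+  # UNIX_COMMAND parsing splits the string
+  #     latex -interaction=batchmode "my file.tex"
+  # into the three-element list: latex;-interaction=batchmode;my file.tex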
+  if (CMAKE_VERSION VERSION_GREATER 3.8)
+    set(separate_arguments_mode NATIVE_COMMAND)
+  else()
+    if (WIN32)
+      set(separate_arguments_mode WINDOWS_COMMAND)
+    else()
+      set(separate_arguments_mode UNIX_COMMAND)
+    endif()
+  endif()
+
+  # Preps variables for use in execute_process.
+  # Even though we expect LATEX_WORKING_DIRECTORY to have a single "argument,"
+  # we also want to make sure that we strip out any escape characters that can
+  # foul up the WORKING_DIRECTORY argument.
+  separate_arguments(LATEX_FULL_COMMAND ${separate_arguments_mode} "${LATEX_FULL_COMMAND}")
+  separate_arguments(LATEX_WORKING_DIRECTORY UNIX_COMMAND "${LATEX_WORKING_DIRECTORY}")
+
+  execute_process(
+    COMMAND ${LATEX_FULL_COMMAND}
+    WORKING_DIRECTORY ${LATEX_WORKING_DIRECTORY}
+    RESULT_VARIABLE execute_result
+    )
+
+  if(NOT ${execute_result} EQUAL 0)
+    # LaTeX tends to write a file when a failure happens. Delete that file so
+    # that LaTeX will run again.
+    file(REMOVE "${LATEX_WORKING_DIRECTORY}/${LATEX_OUTPUT_FILE}")
+
+    message("\n\nLaTeX command failed")
+    message("${full_command_original}")
+    message("Log output:")
+    file(READ ${LATEX_WORKING_DIRECTORY}/${LATEX_TARGET}.log log_output)
+    message("${log_output}")
+    message(FATAL_ERROR
+      "Successfully executed LaTeX, but LaTeX returned an error.")
+  endif()
+endfunction(latex_execute_latex)
+
+function(latex_makeglossaries)
+  # This is really a bare bones port of the makeglossaries perl script into
+  # CMake scripting.
+  message("**************************** In makeglossaries")
+  if(NOT LATEX_TARGET)
+    message(SEND_ERROR "Need to define LATEX_TARGET")
+  endif()
+
+  set(aux_file ${LATEX_TARGET}.aux)
+
+  if(NOT EXISTS ${aux_file})
+    message(SEND_ERROR "${aux_file} does not exist. Run latex on your target file.")
+  endif()
+
+  latex_file_match(newglossary_lines ${aux_file}
+    "@newglossary[ \t]*{([^}]*)}{([^}]*)}{([^}]*)}{([^}]*)}"
+    "@newglossary{main}{glg}{gls}{glo}"
+    )
+
+  latex_file_match(istfile_line ${aux_file}
+    "@istfilename[ \t]*{([^}]*)}"
+    "@istfilename{${LATEX_TARGET}.ist}"
+    )
+  string(REGEX REPLACE "@istfilename[ \t]*{([^}]*)}" "\\1"
+    istfile ${istfile_line}
+    )
+
+  string(REGEX MATCH ".*\\.xdy" use_xindy "${istfile}")
+  if(use_xindy)
+    message("*************** Using xindy")
+    if(NOT XINDY_COMPILER)
+      message(SEND_ERROR "Need to define XINDY_COMPILER")
+    endif()
+  else()
+    message("*************** Using makeindex")
+    if(NOT MAKEINDEX_COMPILER)
+      message(SEND_ERROR "Need to define MAKEINDEX_COMPILER")
+    endif()
+  endif()
+
+  foreach(newglossary ${newglossary_lines})
+    string(REGEX REPLACE
+      "@newglossary[ \t]*{([^}]*)}{([^}]*)}{([^}]*)}{([^}]*)}"
+      "\\1" glossary_name ${newglossary}
+      )
+    string(REGEX REPLACE
+      "@newglossary[ \t]*{([^}]*)}{([^}]*)}{([^}]*)}{([^}]*)}"
+      "${LATEX_TARGET}.\\2" glossary_log ${newglossary}
+      )
+    string(REGEX REPLACE
+      "@newglossary[ \t]*{([^}]*)}{([^}]*)}{([^}]*)}{([^}]*)}"
+      "${LATEX_TARGET}.\\3" glossary_out ${newglossary}
+      )
+    string(REGEX REPLACE
+      "@newglossary[ \t]*{([^}]*)}{([^}]*)}{([^}]*)}{([^}]*)}"
+      "${LATEX_TARGET}.\\4" glossary_in ${newglossary}
+      )
+
+    if(use_xindy)
+      latex_file_match(xdylanguage_line ${aux_file}
+        "@xdylanguage[ \t]*{${glossary_name}}{([^}]*)}"
+        "@xdylanguage{${glossary_name}}{english}"
+        )
+      string(REGEX REPLACE
+        "@xdylanguage[ \t]*{${glossary_name}}{([^}]*)}"
+        "\\1"
+        language
+        ${xdylanguage_line}
+        )
+      # What crazy person makes a LaTeX index generator that uses different
+      # identifiers for language than babel (or at least does not support
+      # the old ones)?
+ if(${language} STREQUAL "frenchb") + set(language "french") + elseif(${language} MATCHES "^n?germanb?$") + set(language "german") + elseif(${language} STREQUAL "magyar") + set(language "hungarian") + elseif(${language} STREQUAL "lsorbian") + set(language "lower-sorbian") + elseif(${language} STREQUAL "norsk") + set(language "norwegian") + elseif(${language} STREQUAL "portuges") + set(language "portuguese") + elseif(${language} STREQUAL "russianb") + set(language "russian") + elseif(${language} STREQUAL "slovene") + set(language "slovenian") + elseif(${language} STREQUAL "ukraineb") + set(language "ukrainian") + elseif(${language} STREQUAL "usorbian") + set(language "upper-sorbian") + endif() + if(language) + set(language_flags "-L ${language}") + else() + set(language_flags "") + endif() + + latex_file_match(codepage_line ${aux_file} + "@gls@codepage[ \t]*{${glossary_name}}{([^}]*)}" + "@gls@codepage{${glossary_name}}{utf}" + ) + string(REGEX REPLACE + "@gls@codepage[ \t]*{${glossary_name}}{([^}]*)}" + "\\1" + codepage + ${codepage_line} + ) + if(codepage) + set(codepage_flags "-C ${codepage}") + else() + # Ideally, we would check that the language is compatible with the + # default codepage, but I'm hoping that distributions will be smart + # enough to specify their own codepage. I know, it's asking a lot. + set(codepage_flags "") + endif() + + message("${XINDY_COMPILER} ${MAKEGLOSSARIES_COMPILER_ARGS} ${language_flags} ${codepage_flags} -I xindy -M ${glossary_name} -t ${glossary_log} -o ${glossary_out} ${glossary_in}" + ) + exec_program(${XINDY_COMPILER} + ARGS ${MAKEGLOSSARIES_COMPILER_ARGS} + ${language_flags} + ${codepage_flags} + -I xindy + -M ${glossary_name} + -t ${glossary_log} + -o ${glossary_out} + ${glossary_in} + OUTPUT_VARIABLE xindy_output + ) + message("${xindy_output}") + + # So, it is possible (perhaps common?) for aux files to specify a + # language and codepage that are incompatible with each other. Check + # for that condition, and if it happens run again with the default + # codepage. 
+ if("${xindy_output}" MATCHES "^Cannot locate xindy module for language (.+) in codepage (.+)\\.$") + message("*************** Retrying xindy with default codepage.") + exec_program(${XINDY_COMPILER} + ARGS ${MAKEGLOSSARIES_COMPILER_ARGS} + ${language_flags} + -I xindy + -M ${glossary_name} + -t ${glossary_log} + -o ${glossary_out} + ${glossary_in} + ) + endif() + + else() + message("${MAKEINDEX_COMPILER} ${MAKEGLOSSARIES_COMPILER_ARGS} -s ${istfile} -t ${glossary_log} -o ${glossary_out} ${glossary_in}") + exec_program(${MAKEINDEX_COMPILER} ARGS ${MAKEGLOSSARIES_COMPILER_ARGS} + -s ${istfile} -t ${glossary_log} -o ${glossary_out} ${glossary_in} + ) + endif() + + endforeach(newglossary) +endfunction(latex_makeglossaries) + +function(latex_makenomenclature) + message("**************************** In makenomenclature") + if(NOT LATEX_TARGET) + message(SEND_ERROR "Need to define LATEX_TARGET") + endif() + + if(NOT MAKEINDEX_COMPILER) + message(SEND_ERROR "Need to define MAKEINDEX_COMPILER") + endif() + + set(nomencl_out ${LATEX_TARGET}.nls) + set(nomencl_in ${LATEX_TARGET}.nlo) + + exec_program(${MAKEINDEX_COMPILER} ARGS ${MAKENOMENCLATURE_COMPILER_ARGS} + ${nomencl_in} -s "nomencl.ist" -o ${nomencl_out} + ) +endfunction(latex_makenomenclature) + +function(latex_correct_synctex) + message("**************************** In correct SyncTeX") + if(NOT LATEX_TARGET) + message(SEND_ERROR "Need to define LATEX_TARGET") + endif() + + if(NOT GZIP) + message(SEND_ERROR "Need to define GZIP") + endif() + + if(NOT LATEX_SOURCE_DIRECTORY) + message(SEND_ERROR "Need to define LATEX_SOURCE_DIRECTORY") + endif() + + if(NOT LATEX_BINARY_DIRECTORY) + message(SEND_ERROR "Need to define LATEX_BINARY_DIRECTORY") + endif() + + set(synctex_file ${LATEX_BINARY_DIRECTORY}/${LATEX_TARGET}.synctex) + set(synctex_file_gz ${synctex_file}.gz) + + if(EXISTS ${synctex_file_gz}) + + message("Making backup of synctex file.") + configure_file(${synctex_file_gz} ${synctex_file}.bak.gz COPYONLY) + + message("Uncompressing synctex file.") + exec_program(${GZIP} + ARGS --decompress ${synctex_file_gz} + ) + + message("Reading synctex file.") + file(READ ${synctex_file} synctex_data) + + message("Replacing relative with absolute paths.") + string(REGEX REPLACE + "(Input:[0-9]+:)([^/\n][^\n]*)" + "\\1${LATEX_SOURCE_DIRECTORY}/\\2" + synctex_data + "${synctex_data}" + ) + + message("Writing synctex file.") + file(WRITE ${synctex_file} "${synctex_data}") + + message("Compressing synctex file.") + exec_program(${GZIP} + ARGS ${synctex_file} + ) + + else() + + message(SEND_ERROR "File ${synctex_file_gz} not found. 
Perhaps synctex is not supported by your LaTeX compiler.") + + endif() + +endfunction(latex_correct_synctex) + +function(latex_check_important_warnings) + set(log_file ${LATEX_TARGET}.log) + + message("\nChecking ${log_file} for important warnings.") + if(NOT LATEX_TARGET) + message(SEND_ERROR "Need to define LATEX_TARGET") + endif() + + if(NOT EXISTS ${log_file}) + message("Could not find log file: ${log_file}") + return() + endif() + + set(found_error) + + file(READ ${log_file} log) + + # Check for undefined references + string(REGEX MATCHALL + "\n[^\n]*Reference[^\n]*undefined[^\n]*" + reference_warnings + "${log}") + if(reference_warnings) + set(found_error TRUE) + message("\nFound missing reference warnings.") + foreach(warning ${reference_warnings}) + string(STRIP "${warning}" warning_no_newline) + message("${warning_no_newline}") + endforeach(warning) + endif() + + # Check for natbib warnings + string(REGEX MATCHALL + "\nPackage natbib Warning:[^\n]*" + natbib_warnings + "${log}") + if(natbib_warnings) + set(found_error TRUE) + message("\nFound natbib package warnings.") + foreach(warning ${natbib_warnings}) + string(STRIP "${warning}" warning_no_newline) + message("${warning_no_newline}") + endforeach(warning) + endif() + + # Check for overfull + string(REGEX MATCHALL + "\nOverfull[^\n]*" + overfull_warnings + "${log}") + if(overfull_warnings) + set(found_error TRUE) + message("\nFound overfull warnings. These are indicative of layout errors.") + foreach(warning ${overfull_warnings}) + string(STRIP "${warning}" warning_no_newline) + message("${warning_no_newline}") + endforeach(warning) + endif() + + # Check for invalid characters + string(REGEX MATCHALL + "\nMissing character:[^\n]*" + invalid_character_warnings + "${log}") + if(invalid_character_warnings) + set(found_error TRUE) + message("\nFound invalid character warnings. These characters are likely not printed correctly.") + foreach(warning ${invalid_character_warnings}) + string(STRIP "${warning}" warning_no_newline) + message("${warning_no_newline}") + endforeach(warning) + endif() + + if(found_error) + latex_get_filename_component(log_file_path ${log_file} ABSOLUTE) + message("\nConsult ${log_file_path} for more information on LaTeX build.") + else() + message("No known important warnings found.") + endif(found_error) +endfunction(latex_check_important_warnings) + +############################################################################# +# Helper functions for establishing LaTeX build. +############################################################################# + +function(latex_needit VAR NAME) + if(NOT ${VAR}) + message(SEND_ERROR "I need the ${NAME} command.") + endif() +endfunction(latex_needit) + +function(latex_wantit VAR NAME) + if(NOT ${VAR}) + message(STATUS "I could not find the ${NAME} command.") + endif() +endfunction(latex_wantit) + +function(latex_setup_variables) + set(LATEX_OUTPUT_PATH "${LATEX_OUTPUT_PATH}" + CACHE PATH "If non empty, specifies the location to place LaTeX output." + ) + + find_package(LATEX) + + find_program(XINDY_COMPILER + NAME xindy + PATHS ${MIKTEX_BINARY_PATH} /usr/bin + ) + + find_package(UnixCommands) + + find_program(PDFTOPS_CONVERTER + NAMES pdftops + DOC "The pdf to ps converter program from the Poppler package." 
+ ) + + find_program(HTLATEX_COMPILER + NAMES htlatex + PATHS ${MIKTEX_BINARY_PATH} + /usr/bin + ) + + mark_as_advanced( + LATEX_COMPILER + PDFLATEX_COMPILER + BIBTEX_COMPILER + BIBER_COMPILER + MAKEINDEX_COMPILER + XINDY_COMPILER + DVIPS_CONVERTER + PS2PDF_CONVERTER + PDFTOPS_CONVERTER + LATEX2HTML_CONVERTER + HTLATEX_COMPILER + ) + + latex_needit(LATEX_COMPILER latex) + latex_wantit(PDFLATEX_COMPILER pdflatex) + latex_wantit(HTLATEX_COMPILER htlatex) + latex_needit(BIBTEX_COMPILER bibtex) + latex_wantit(BIBER_COMPILER biber) + latex_needit(MAKEINDEX_COMPILER makeindex) + latex_wantit(DVIPS_CONVERTER dvips) + latex_wantit(PS2PDF_CONVERTER ps2pdf) + latex_wantit(PDFTOPS_CONVERTER pdftops) + + set(LATEX_COMPILER_FLAGS "-interaction=batchmode -file-line-error" + CACHE STRING "Flags passed to latex.") + set(PDFLATEX_COMPILER_FLAGS ${LATEX_COMPILER_FLAGS} + CACHE STRING "Flags passed to pdflatex.") + set(HTLATEX_COMPILER_TEX4HT_FLAGS "html" + CACHE STRING "Options for the tex4ht.sty and *.4ht style files.") + set(HTLATEX_COMPILER_TEX4HT_POSTPROCESSOR_FLAGS "" + CACHE STRING "Options for the text4ht postprocessor.") + set(HTLATEX_COMPILER_T4HT_POSTPROCESSOR_FLAGS "" + CACHE STRING "Options for the t4ht postprocessor.") + set(HTLATEX_COMPILER_LATEX_FLAGS ${LATEX_COMPILER_FLAGS} + CACHE STRING "Flags passed from htlatex to the LaTeX compiler.") + set(LATEX_SYNCTEX_FLAGS "-synctex=1" + CACHE STRING "latex/pdflatex flags used to create synctex file.") + set(BIBTEX_COMPILER_FLAGS "" + CACHE STRING "Flags passed to bibtex.") + set(BIBER_COMPILER_FLAGS "" + CACHE STRING "Flags passed to biber.") + set(MAKEINDEX_COMPILER_FLAGS "" + CACHE STRING "Flags passed to makeindex.") + set(MAKEGLOSSARIES_COMPILER_FLAGS "" + CACHE STRING "Flags passed to makeglossaries.") + set(MAKENOMENCLATURE_COMPILER_FLAGS "" + CACHE STRING "Flags passed to makenomenclature.") + set(DVIPS_CONVERTER_FLAGS "-Ppdf -G0 -t letter" + CACHE STRING "Flags passed to dvips.") + if(NOT WIN32) + set(PS2PDF_CONVERTER_FLAGS "-dMaxSubsetPct=100 -dCompatibilityLevel=1.3 -dSubsetFonts=true -dEmbedAllFonts=true -dAutoFilterColorImages=false -dAutoFilterGrayImages=false -dColorImageFilter=/FlateEncode -dGrayImageFilter=/FlateEncode -dMonoImageFilter=/FlateEncode" + CACHE STRING "Flags passed to ps2pdf.") + else() + # Most windows ports of ghostscript utilities use .bat files for ps2pdf + # commands. bat scripts interpret "=" as a special character and separate + # those arguments. To get around this, the ghostscript utilities also + # support using "#" in place of "=". + set(PS2PDF_CONVERTER_FLAGS "-dMaxSubsetPct#100 -dCompatibilityLevel#1.3 -dSubsetFonts#true -dEmbedAllFonts#true -dAutoFilterColorImages#false -dAutoFilterGrayImages#false -dColorImageFilter#/FlateEncode -dGrayImageFilter#/FlateEncode -dMonoImageFilter#/FlateEncode" + CACHE STRING "Flags passed to ps2pdf.") + endif() + set(PDFTOPS_CONVERTER_FLAGS "" + CACHE STRING "Flags passed to pdftops.") + mark_as_advanced( + LATEX_COMPILER_FLAGS + PDFLATEX_COMPILER_FLAGS + HTLATEX_COMPILER_TEX4HT_FLAGS + HTLATEX_COMPILER_TEX4HT_POSTPROCESSOR_FLAGS + HTLATEX_COMPILER_T4HT_POSTPROCESSOR_FLAGS + HTLATEX_COMPILER_LATEX_FLAGS + LATEX_SYNCTEX_FLAGS + BIBTEX_COMPILER_FLAGS + BIBER_COMPILER_FLAGS + MAKEINDEX_COMPILER_FLAGS + MAKEGLOSSARIES_COMPILER_FLAGS + MAKENOMENCLATURE_COMPILER_FLAGS + DVIPS_CONVERTER_FLAGS + PS2PDF_CONVERTER_FLAGS + PDFTOPS_CONVERTER_FLAGS + ) + + # Because it is easier to type, the flags variables are entered as + # space-separated strings much like you would in a shell. 
However, when + # using a CMake command to execute a program, it works better to hold the + # arguments in semicolon-separated lists (otherwise the whole string will + # be interpreted as a single argument). Use the separate_arguments to + # convert the space-separated strings to semicolon-separated lists. + separate_arguments(LATEX_COMPILER_FLAGS) + separate_arguments(PDFLATEX_COMPILER_FLAGS) + separate_arguments(HTLATEX_COMPILER_LATEX_FLAGS) + separate_arguments(LATEX_SYNCTEX_FLAGS) + separate_arguments(BIBTEX_COMPILER_FLAGS) + separate_arguments(BIBER_COMPILER_FLAGS) + separate_arguments(MAKEINDEX_COMPILER_FLAGS) + separate_arguments(MAKEGLOSSARIES_COMPILER_FLAGS) + separate_arguments(MAKENOMENCLATURE_COMPILER_FLAGS) + separate_arguments(DVIPS_CONVERTER_FLAGS) + separate_arguments(PS2PDF_CONVERTER_FLAGS) + separate_arguments(PDFTOPS_CONVERTER_FLAGS) + + # Not quite done. When you call separate_arguments on a cache variable, + # the result is written to a local variable. That local variable goes + # away when this function returns (which is before any of them are used). + # So, copy these variables with local scope to cache variables with + # global scope. + set(LATEX_COMPILER_ARGS "${LATEX_COMPILER_FLAGS}" CACHE INTERNAL "") + set(PDFLATEX_COMPILER_ARGS "${PDFLATEX_COMPILER_FLAGS}" CACHE INTERNAL "") + set(HTLATEX_COMPILER_ARGS "${HTLATEX_COMPILER_LATEX_FLAGS}" CACHE INTERNAL "") + set(LATEX_SYNCTEX_ARGS "${LATEX_SYNCTEX_FLAGS}" CACHE INTERNAL "") + set(BIBTEX_COMPILER_ARGS "${BIBTEX_COMPILER_FLAGS}" CACHE INTERNAL "") + set(BIBER_COMPILER_ARGS "${BIBER_COMPILER_FLAGS}" CACHE INTERNAL "") + set(MAKEINDEX_COMPILER_ARGS "${MAKEINDEX_COMPILER_FLAGS}" CACHE INTERNAL "") + set(MAKEGLOSSARIES_COMPILER_ARGS "${MAKEGLOSSARIES_COMPILER_FLAGS}" CACHE INTERNAL "") + set(MAKENOMENCLATURE_COMPILER_ARGS "${MAKENOMENCLATURE_COMPILER_FLAGS}" CACHE INTERNAL "") + set(DVIPS_CONVERTER_ARGS "${DVIPS_CONVERTER_FLAGS}" CACHE INTERNAL "") + set(PS2PDF_CONVERTER_ARGS "${PS2PDF_CONVERTER_FLAGS}" CACHE INTERNAL "") + set(PDFTOPS_CONVERTER_ARGS "${PDFTOPS_CONVERTER_FLAGS}" CACHE INTERNAL "") + + find_program(IMAGEMAGICK_CONVERT + NAMES magick convert + DOC "The convert program that comes with ImageMagick (available at http://www.imagemagick.org)." + ) + mark_as_advanced(IMAGEMAGICK_CONVERT) + + if(DEFINED ENV{LATEX_DEFAULT_BUILD}) + set(default_build $ENV{LATEX_DEFAULT_BUILD}) + else() + set(default_build pdf) + endif() + + set(LATEX_DEFAULT_BUILD "${default_build}" CACHE STRING + "Choose the default type of LaTeX build. Valid options are pdf, dvi, ps, safepdf, html" + ) + set_property(CACHE LATEX_DEFAULT_BUILD + PROPERTY STRINGS pdf dvi ps safepdf html + ) + + option(LATEX_USE_SYNCTEX + "If on, have LaTeX generate a synctex file, which WYSIWYG editors can use to correlate output files like dvi and pdf with the lines of LaTeX source that generates them. In addition to adding the LATEX_SYNCTEX_FLAGS to the command line, this option also adds build commands that \"corrects\" the resulting synctex file to point to the original LaTeX files rather than those generated by UseLATEX.cmake." + OFF + ) + + option(LATEX_SMALL_IMAGES + "If on, the raster images will be converted to 1/6 the original size. This is because papers usually require 600 dpi images whereas most monitors only require at most 96 dpi. Thus, smaller images make smaller files for web distribution and can make it faster to read dvi files." 
+ OFF) + if(LATEX_SMALL_IMAGES) + set(LATEX_RASTER_SCALE 16 PARENT_SCOPE) + set(LATEX_OPPOSITE_RASTER_SCALE 100 PARENT_SCOPE) + else() + set(LATEX_RASTER_SCALE 100 PARENT_SCOPE) + set(LATEX_OPPOSITE_RASTER_SCALE 16 PARENT_SCOPE) + endif() + + # Just holds extensions for known image types. They should all be lower case. + # For historical reasons, these are all declared in the global scope. + set(LATEX_DVI_VECTOR_IMAGE_EXTENSIONS .eps CACHE INTERNAL "") + set(LATEX_DVI_RASTER_IMAGE_EXTENSIONS CACHE INTERNAL "") + set(LATEX_DVI_IMAGE_EXTENSIONS + ${LATEX_DVI_VECTOR_IMAGE_EXTENSIONS} + ${LATEX_DVI_RASTER_IMAGE_EXTENSIONS} + CACHE INTERNAL "" + ) + + set(LATEX_PDF_VECTOR_IMAGE_EXTENSIONS .pdf CACHE INTERNAL "") + set(LATEX_PDF_RASTER_IMAGE_EXTENSIONS .jpeg .jpg .png CACHE INTERNAL "") + set(LATEX_PDF_IMAGE_EXTENSIONS + ${LATEX_PDF_VECTOR_IMAGE_EXTENSIONS} + ${LATEX_PDF_RASTER_IMAGE_EXTENSIONS} + CACHE INTERNAL "" + ) + + set(LATEX_OTHER_VECTOR_IMAGE_EXTENSIONS .ai .dot .svg CACHE INTERNAL "") + set(LATEX_OTHER_RASTER_IMAGE_EXTENSIONS + .bmp .bmp2 .bmp3 .dcm .dcx .ico .gif .pict .ppm .tif .tiff + CACHE INTERNAL "") + set(LATEX_OTHER_IMAGE_EXTENSIONS + ${LATEX_OTHER_VECTOR_IMAGE_EXTENSIONS} + ${LATEX_OTHER_RASTER_IMAGE_EXTENSIONS} + CACHE INTERNAL "" + ) + + set(LATEX_VECTOR_IMAGE_EXTENSIONS + ${LATEX_DVI_VECTOR_IMAGE_EXTENSIONS} + ${LATEX_PDF_VECTOR_IMAGE_EXTENSIONS} + ${LATEX_OTHER_VECTOR_IMAGE_EXTENSIONS} + CACHE INTERNAL "" + ) + set(LATEX_RASTER_IMAGE_EXTENSIONS + ${LATEX_DVI_RASTER_IMAGE_EXTENSIONS} + ${LATEX_PDF_RASTER_IMAGE_EXTENSIONS} + ${LATEX_OTHER_RASTER_IMAGE_EXTENSIONS} + CACHE INTERNAL "" + ) + set(LATEX_IMAGE_EXTENSIONS + ${LATEX_DVI_IMAGE_EXTENSIONS} + ${LATEX_PDF_IMAGE_EXTENSIONS} + ${LATEX_OTHER_IMAGE_EXTENSIONS} + CACHE INTERNAL "" + ) +endfunction(latex_setup_variables) + +function(latex_setup_targets) + if(NOT TARGET pdf) + add_custom_target(pdf) + endif() + if(NOT TARGET dvi) + add_custom_target(dvi) + endif() + if(NOT TARGET ps) + add_custom_target(ps) + endif() + if(NOT TARGET safepdf) + add_custom_target(safepdf) + endif() + if(NOT TARGET html) + add_custom_target(html) + endif() + if(NOT TARGET auxclean) + add_custom_target(auxclean) + endif() +endfunction(latex_setup_targets) + +function(latex_get_output_path var) + set(latex_output_path) + if(LATEX_OUTPUT_PATH) + get_filename_component( + LATEX_OUTPUT_PATH_FULL "${LATEX_OUTPUT_PATH}" ABSOLUTE + ) + if("${LATEX_OUTPUT_PATH_FULL}" STREQUAL "${CMAKE_CURRENT_SOURCE_DIR}") + message(SEND_ERROR "You cannot set LATEX_OUTPUT_PATH to the same directory that contains LaTeX input files.") + else() + set(latex_output_path "${LATEX_OUTPUT_PATH_FULL}") + endif() + else() + if("${CMAKE_CURRENT_BINARY_DIR}" STREQUAL "${CMAKE_CURRENT_SOURCE_DIR}") + message(SEND_ERROR "LaTeX files must be built out of source or you must set LATEX_OUTPUT_PATH.") + else() + set(latex_output_path "${CMAKE_CURRENT_BINARY_DIR}") + endif() + endif() + set(${var} ${latex_output_path} PARENT_SCOPE) +endfunction(latex_get_output_path) + +function(latex_add_convert_command + output_path + input_path + output_extension + input_extension + flags + ) + set(require_imagemagick_convert TRUE) + set(convert_flags "") + if(${input_extension} STREQUAL ".eps" AND ${output_extension} STREQUAL ".pdf") + # ImageMagick has broken eps to pdf conversion + # use ps2pdf instead + if(PS2PDF_CONVERTER) + set(require_imagemagick_convert FALSE) + set(converter ${PS2PDF_CONVERTER}) + set(convert_flags -dEPSCrop ${PS2PDF_CONVERTER_ARGS}) + else() + message(SEND_ERROR "Using 
postscript files with pdflatex requires ps2pdf for conversion.") + endif() + elseif(${input_extension} STREQUAL ".pdf" AND ${output_extension} STREQUAL ".eps") + # ImageMagick can also be sketchy on pdf to eps conversion. Not good with + # color spaces and tends to unnecessarily rasterize. + # use pdftops instead + if(PDFTOPS_CONVERTER) + set(require_imagemagick_convert FALSE) + set(converter ${PDFTOPS_CONVERTER}) + set(convert_flags -eps ${PDFTOPS_CONVERTER_ARGS}) + else() + message(STATUS "Consider getting pdftops from Poppler to convert PDF images to EPS images.") + set(convert_flags ${flags}) + endif() + else() + set(convert_flags ${flags}) + endif() + + if(require_imagemagick_convert) + if(IMAGEMAGICK_CONVERT) + string(TOLOWER ${IMAGEMAGICK_CONVERT} IMAGEMAGICK_CONVERT_LOWERCASE) + if(${IMAGEMAGICK_CONVERT_LOWERCASE} MATCHES "system32[/\\\\]convert\\.exe") + message(SEND_ERROR "IMAGEMAGICK_CONVERT set to Window's convert.exe for changing file systems rather than ImageMagick's convert for changing image formats. Please make sure ImageMagick is installed (available at http://www.imagemagick.org). If you have a recent version of ImageMagick (7.0 or higher), use the magick program instead of convert for IMAGEMAGICK_CONVERT.") + else() + set(converter ${IMAGEMAGICK_CONVERT}) + # ImageMagick requires a special order of arguments where resize and + # arguments of that nature must be placed after the input image path. + add_custom_command(OUTPUT ${output_path} + COMMAND ${converter} + ARGS ${input_path} ${convert_flags} ${output_path} + DEPENDS ${input_path} + ) + endif() + else() + message(SEND_ERROR "Could not find convert program. Please download ImageMagick from http://www.imagemagick.org and install.") + endif() + else() # Not ImageMagick convert + add_custom_command(OUTPUT ${output_path} + COMMAND ${converter} + ARGS ${convert_flags} ${input_path} ${output_path} + DEPENDS ${input_path} + ) + endif() +endfunction(latex_add_convert_command) + +# Makes custom commands to convert a file to a particular type. +function(latex_convert_image + output_files_var + input_file + output_extension + convert_flags + output_extensions + other_files + ) + set(output_file_list) + set(input_dir ${CMAKE_CURRENT_SOURCE_DIR}) + latex_get_output_path(output_dir) + + latex_get_filename_component(extension "${input_file}" EXT) + + # Check input filename for potential problems with LaTeX. + latex_get_filename_component(name "${input_file}" NAME_WE) + set(suggested_name "${name}") + if(suggested_name MATCHES ".*\\..*") + string(REPLACE "." "-" suggested_name "${suggested_name}") + endif() + if(suggested_name MATCHES ".* .*") + string(REPLACE " " "-" suggested_name "${suggested_name}") + endif() + if(NOT suggested_name STREQUAL name) + message(WARNING "Some LaTeX distributions have problems with image file names with multiple extensions or spaces. Consider changing ${name}${extension} to something like ${suggested_name}${extension}.") + endif() + + string(REGEX REPLACE "\\.[^.]*\$" ${output_extension} output_file + "${input_file}") + + latex_list_contains(is_type ${extension} ${output_extensions}) + if(is_type) + if(convert_flags) + latex_add_convert_command(${output_dir}/${output_file} + ${input_dir}/${input_file} ${output_extension} ${extension} + "${convert_flags}") + set(output_file_list ${output_dir}/${output_file}) + else() + # As a shortcut, we can just copy the file. 
+ add_custom_command(OUTPUT ${output_dir}/${input_file} + COMMAND ${CMAKE_COMMAND} + ARGS -E copy ${input_dir}/${input_file} ${output_dir}/${input_file} + DEPENDS ${input_dir}/${input_file} + ) + set(output_file_list ${output_dir}/${input_file}) + endif() + else() + set(do_convert TRUE) + # Check to see if there is another input file of the appropriate type. + foreach(valid_extension ${output_extensions}) + string(REGEX REPLACE "\\.[^.]*\$" ${output_extension} try_file + "${input_file}") + latex_list_contains(has_native_file "${try_file}" ${other_files}) + if(has_native_file) + set(do_convert FALSE) + endif() + endforeach(valid_extension) + + # If we still need to convert, do it. + if(do_convert) + latex_add_convert_command(${output_dir}/${output_file} + ${input_dir}/${input_file} ${output_extension} ${extension} + "${convert_flags}") + set(output_file_list ${output_dir}/${output_file}) + endif() + endif() + + set(${output_files_var} ${output_file_list} PARENT_SCOPE) +endfunction(latex_convert_image) + +# Adds custom commands to process the given files for dvi and pdf builds. +# Adds the output files to the given variables (does not replace). +function(latex_process_images dvi_outputs_var pdf_outputs_var) + latex_get_output_path(output_dir) + set(dvi_outputs) + set(pdf_outputs) + foreach(file ${ARGN}) + if(EXISTS "${CMAKE_CURRENT_SOURCE_DIR}/${file}") + latex_get_filename_component(extension "${file}" EXT) + set(convert_flags) + + # Check to see if we need to downsample the image. + latex_list_contains(is_raster "${extension}" + ${LATEX_RASTER_IMAGE_EXTENSIONS}) + if(LATEX_SMALL_IMAGES) + if(is_raster) + set(convert_flags -resize ${LATEX_RASTER_SCALE}%) + endif() + endif() + + # Make sure the output directory exists. + latex_get_filename_component(path "${output_dir}/${file}" PATH) + make_directory("${path}") + + # Do conversions for dvi. + latex_convert_image(output_files "${file}" .eps "${convert_flags}" + "${LATEX_DVI_IMAGE_EXTENSIONS}" "${ARGN}") + list(APPEND dvi_outputs ${output_files}) + + # Do conversions for pdf. + if(is_raster) + latex_convert_image(output_files "${file}" .png "${convert_flags}" + "${LATEX_PDF_IMAGE_EXTENSIONS}" "${ARGN}") + list(APPEND pdf_outputs ${output_files}) + else() + latex_convert_image(output_files "${file}" .pdf "${convert_flags}" + "${LATEX_PDF_IMAGE_EXTENSIONS}" "${ARGN}") + list(APPEND pdf_outputs ${output_files}) + endif() + else() + message(WARNING "Could not find file ${CMAKE_CURRENT_SOURCE_DIR}/${file}. 
Are you sure you gave relative paths to IMAGES?")
+    endif()
+  endforeach(file)
+
+  set(${dvi_outputs_var} ${dvi_outputs} PARENT_SCOPE)
+  set(${pdf_outputs_var} ${pdf_outputs} PARENT_SCOPE)
+endfunction(latex_process_images)
+
+function(latex_copy_globbed_files pattern dest)
+  file(GLOB file_list ${pattern})
+  foreach(in_file ${file_list})
+    latex_get_filename_component(out_file ${in_file} NAME)
+    configure_file(${in_file} ${dest}/${out_file} COPYONLY)
+  endforeach(in_file)
+endfunction(latex_copy_globbed_files)
+
+function(latex_copy_input_file file)
+  latex_get_output_path(output_dir)
+
+  if(EXISTS ${CMAKE_CURRENT_SOURCE_DIR}/${file})
+    latex_get_filename_component(path ${file} PATH)
+    file(MAKE_DIRECTORY ${output_dir}/${path})
+
+    latex_list_contains(use_config ${file} ${LATEX_CONFIGURE})
+    if(use_config)
+      configure_file(${CMAKE_CURRENT_SOURCE_DIR}/${file}
+        ${output_dir}/${file}
+        @ONLY
+        )
+      add_custom_command(OUTPUT ${output_dir}/${file}
+        COMMAND ${CMAKE_COMMAND}
+        ARGS ${CMAKE_BINARY_DIR}
+        DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/${file}
+        )
+    else()
+      add_custom_command(OUTPUT ${output_dir}/${file}
+        COMMAND ${CMAKE_COMMAND}
+        ARGS -E copy ${CMAKE_CURRENT_SOURCE_DIR}/${file} ${output_dir}/${file}
+        DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/${file}
+        )
+    endif()
+  else()
+    if(EXISTS ${output_dir}/${file})
+      # Special case: output exists but input does not. Assume that it was
+      # created elsewhere and skip the input file copy.
+    else()
+      message("Could not find input file ${CMAKE_CURRENT_SOURCE_DIR}/${file}")
+    endif()
+  endif()
+endfunction(latex_copy_input_file)
+
+#############################################################################
+# Commands provided by the UseLATEX.cmake "package"
+#############################################################################
+
+function(latex_usage command message)
+  message(SEND_ERROR
+    "${message}\n  Usage: ${command}(<tex_file>\n           [BIBFILES <bib_files> ...]\n           [INPUTS <input_tex_files> ...]\n           [IMAGE_DIRS <image_directories> ...]\n           [IMAGES <image_files>]\n           [CONFIGURE <tex_files> ...]\n           [DEPENDS <files> ...]\n           [MULTIBIB_NEWCITES] <suffix_list>\n           [USE_BIBLATEX] [USE_INDEX] [USE_GLOSSARY] [USE_NOMENCL]\n           [FORCE_PDF] [FORCE_DVI] [FORCE_HTML]\n           [TARGET_NAME] <name>\n           [EXCLUDE_FROM_ALL]\n           [EXCLUDE_FROM_DEFAULTS])"
+    )
+endfunction(latex_usage command message)
+
+# Parses arguments to add_latex_document and ADD_LATEX_TARGETS and sets the
+# variables LATEX_TARGET, LATEX_IMAGE_DIR, LATEX_BIBFILES, LATEX_DEPENDS, and
+# LATEX_INPUTS.
+function(parse_add_latex_arguments command latex_main_input)
+  set(options
+    USE_BIBLATEX
+    USE_INDEX
+    USE_GLOSSARY
+    USE_NOMENCL
+    FORCE_PDF
+    FORCE_DVI
+    FORCE_HTML
+    EXCLUDE_FROM_ALL
+    EXCLUDE_FROM_DEFAULTS
+    # Deprecated options
+    USE_GLOSSARIES
+    DEFAULT_PDF
+    DEFAULT_SAFEPDF
+    DEFAULT_PS
+    NO_DEFAULT
+    MANGLE_TARGET_NAMES
+    )
+  set(oneValueArgs
+    TARGET_NAME
+    )
+  set(multiValueArgs
+    BIBFILES
+    MULTIBIB_NEWCITES
+    INPUTS
+    IMAGE_DIRS
+    IMAGES
+    CONFIGURE
+    DEPENDS
+    INDEX_NAMES
+    )
+  cmake_parse_arguments(
+    LATEX "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN})
+
+  # Handle invalid and deprecated arguments
+  if(LATEX_UNPARSED_ARGUMENTS)
+    latex_usage(${command} "Invalid or deprecated arguments: ${LATEX_UNPARSED_ARGUMENTS}")
+  endif()
+  if(LATEX_USE_GLOSSARIES)
+    latex_usage(${command} "USE_GLOSSARIES option removed in version 1.6.1. Use USE_GLOSSARY instead.")
+  endif()
+  if(LATEX_DEFAULT_PDF)
+    latex_usage(${command} "DEFAULT_PDF option removed in version 2.0.
Use FORCE_PDF option or LATEX_DEFAULT_BUILD CMake variable instead.") + endif() + if(LATEX_DEFAULT_SAFEPDF) + latex_usage(${command} "DEFAULT_SAFEPDF option removed in version 2.0. Use LATEX_DEFAULT_BUILD CMake variable instead.") + endif() + if(LATEX_DEFAULT_DVI) + latex_usage(${command} "DEFAULT_DVI option removed in version 2.0. Use FORCE_DVI option or LATEX_DEFAULT_BUILD CMake variable instead.") + endif() + if(LATEX_NO_DEFAULT) + latex_usage(${command} "NO_DEFAULT option removed in version 2.0. Use EXCLUDE_FROM_ALL instead.") + endif() + if(LATEX_MANGLE_TARGET_NAMES) + latex_usage(${command} "MANGLE_TARGET_NAMES option removed in version 2.0. All LaTeX targets use mangled names now.") + endif() + + # Capture the first argument, which is the main LaTeX input. + latex_get_filename_component(latex_target ${latex_main_input} NAME_WE) + set(LATEX_MAIN_INPUT ${latex_main_input} PARENT_SCOPE) + set(LATEX_TARGET ${latex_target} PARENT_SCOPE) + + # Propagate the result variables to the caller + foreach(arg_name ${options} ${oneValueArgs} ${multiValueArgs}) + set(var_name LATEX_${arg_name}) + set(${var_name} ${${var_name}} PARENT_SCOPE) + endforeach(arg_name) +endfunction(parse_add_latex_arguments) + +function(add_latex_targets_internal) + latex_get_output_path(output_dir) + + if(LATEX_USE_SYNCTEX) + set(synctex_flags ${LATEX_SYNCTEX_ARGS}) + else() + set(synctex_flags) + endif() + + # The commands to run LaTeX. They are repeated multiple times. + set(latex_build_command + ${LATEX_COMPILER} ${LATEX_COMPILER_ARGS} ${synctex_flags} ${LATEX_MAIN_INPUT} + ) + if(LATEX_COMPILER_ARGS MATCHES ".*batchmode.*") + # Wrap command in script that dumps the log file on error. This makes sure + # errors can be seen. + set(latex_build_command + ${CMAKE_COMMAND} + -D LATEX_BUILD_COMMAND=execute_latex + -D LATEX_TARGET=${LATEX_TARGET} + -D LATEX_WORKING_DIRECTORY="${output_dir}" + -D LATEX_FULL_COMMAND="${latex_build_command}" + -D LATEX_OUTPUT_FILE="${LATEX_TARGET}.dvi" + -P "${LATEX_USE_LATEX_LOCATION}" + ) + endif() + set(pdflatex_build_command + ${PDFLATEX_COMPILER} ${PDFLATEX_COMPILER_ARGS} ${synctex_flags} ${LATEX_MAIN_INPUT} + ) + if(PDFLATEX_COMPILER_ARGS MATCHES ".*batchmode.*") + # Wrap command in script that dumps the log file on error. This makes sure + # errors can be seen. + set(pdflatex_build_command + ${CMAKE_COMMAND} + -D LATEX_BUILD_COMMAND=execute_latex + -D LATEX_TARGET=${LATEX_TARGET} + -D LATEX_WORKING_DIRECTORY="${output_dir}" + -D LATEX_FULL_COMMAND="${pdflatex_build_command}" + -D LATEX_OUTPUT_FILE="${LATEX_TARGET}.pdf" + -P "${LATEX_USE_LATEX_LOCATION}" + ) + endif() + + if(NOT LATEX_TARGET_NAME) + # Use the main filename (minus the .tex) as the target name. Remove any + # spaces since CMake cannot have spaces in its target names. + string(REPLACE " " "_" LATEX_TARGET_NAME ${LATEX_TARGET}) + endif() + + # Some LaTeX commands may need to be modified (or may not work) if the main + # tex file is in a subdirectory. Make a flag for that. + get_filename_component(LATEX_MAIN_INPUT_SUBDIR ${LATEX_MAIN_INPUT} DIRECTORY) + + # Set up target names. + set(dvi_target ${LATEX_TARGET_NAME}_dvi) + set(pdf_target ${LATEX_TARGET_NAME}_pdf) + set(ps_target ${LATEX_TARGET_NAME}_ps) + set(safepdf_target ${LATEX_TARGET_NAME}_safepdf) + set(html_target ${LATEX_TARGET_NAME}_html) + set(auxclean_target ${LATEX_TARGET_NAME}_auxclean) + + # Probably not all of these will be generated, but they could be. + # Note that the aux file is added later. 
+ set(auxiliary_clean_files + ${output_dir}/${LATEX_TARGET}.aux + ${output_dir}/${LATEX_TARGET}.bbl + ${output_dir}/${LATEX_TARGET}.blg + ${output_dir}/${LATEX_TARGET}-blx.bib + ${output_dir}/${LATEX_TARGET}.glg + ${output_dir}/${LATEX_TARGET}.glo + ${output_dir}/${LATEX_TARGET}.gls + ${output_dir}/${LATEX_TARGET}.idx + ${output_dir}/${LATEX_TARGET}.ilg + ${output_dir}/${LATEX_TARGET}.ind + ${output_dir}/${LATEX_TARGET}.ist + ${output_dir}/${LATEX_TARGET}.log + ${output_dir}/${LATEX_TARGET}.out + ${output_dir}/${LATEX_TARGET}.toc + ${output_dir}/${LATEX_TARGET}.lof + ${output_dir}/${LATEX_TARGET}.xdy + ${output_dir}/${LATEX_TARGET}.synctex.gz + ${output_dir}/${LATEX_TARGET}.synctex.bak.gz + ${output_dir}/${LATEX_TARGET}.dvi + ${output_dir}/${LATEX_TARGET}.ps + ${output_dir}/${LATEX_TARGET}.pdf + ) + + set(image_list ${LATEX_IMAGES}) + + # For each directory in LATEX_IMAGE_DIRS, glob all the image files and + # place them in LATEX_IMAGES. + foreach(dir ${LATEX_IMAGE_DIRS}) + if(NOT EXISTS ${CMAKE_CURRENT_SOURCE_DIR}/${dir}) + message(WARNING "Image directory ${CMAKE_CURRENT_SOURCE_DIR}/${dir} does not exist. Are you sure you gave relative directories to IMAGE_DIRS?") + endif() + foreach(extension ${LATEX_IMAGE_EXTENSIONS}) + file(GLOB files ${CMAKE_CURRENT_SOURCE_DIR}/${dir}/*${extension}) + foreach(file ${files}) + latex_get_filename_component(filename ${file} NAME) + list(APPEND image_list ${dir}/${filename}) + endforeach(file) + endforeach(extension) + endforeach(dir) + + latex_process_images(dvi_images pdf_images ${image_list}) + + set(make_dvi_command + ${CMAKE_COMMAND} -E chdir ${output_dir} + ${latex_build_command}) + set(make_pdf_command + ${CMAKE_COMMAND} -E chdir ${output_dir} + ${pdflatex_build_command} + ) + + set(make_dvi_depends ${LATEX_DEPENDS} ${dvi_images}) + set(make_pdf_depends ${LATEX_DEPENDS} ${pdf_images}) + foreach(input ${LATEX_MAIN_INPUT} ${LATEX_INPUTS}) + list(APPEND make_dvi_depends ${output_dir}/${input}) + list(APPEND make_pdf_depends ${output_dir}/${input}) + if(${input} MATCHES "\\.tex$") + # Dependent .tex files might have their own .aux files created. Make + # sure these get cleaned as well. This might replicate the cleaning + # of the main .aux file, which is OK. + string(REGEX REPLACE "\\.tex$" "" input_we ${input}) + list(APPEND auxiliary_clean_files + ${output_dir}/${input_we}.aux + ${output_dir}/${input}.aux + ) + endif() + endforeach(input) + + set(all_latex_sources ${LATEX_MAIN_INPUT} ${LATEX_INPUTS} ${image_list}) + + if(LATEX_USE_GLOSSARY) + foreach(dummy 0 1) # Repeat these commands twice. 
+ set(make_dvi_command ${make_dvi_command} + COMMAND ${CMAKE_COMMAND} -E chdir ${output_dir} + ${CMAKE_COMMAND} + -D LATEX_BUILD_COMMAND=makeglossaries + -D LATEX_TARGET=${LATEX_TARGET} + -D MAKEINDEX_COMPILER=${MAKEINDEX_COMPILER} + -D XINDY_COMPILER=${XINDY_COMPILER} + -D MAKEGLOSSARIES_COMPILER_ARGS=${MAKEGLOSSARIES_COMPILER_ARGS} + -P ${LATEX_USE_LATEX_LOCATION} + COMMAND ${CMAKE_COMMAND} -E chdir ${output_dir} + ${latex_build_command} + ) + set(make_pdf_command ${make_pdf_command} + COMMAND ${CMAKE_COMMAND} -E chdir ${output_dir} + ${CMAKE_COMMAND} + -D LATEX_BUILD_COMMAND=makeglossaries + -D LATEX_TARGET=${LATEX_TARGET} + -D MAKEINDEX_COMPILER=${MAKEINDEX_COMPILER} + -D XINDY_COMPILER=${XINDY_COMPILER} + -D MAKEGLOSSARIES_COMPILER_ARGS=${MAKEGLOSSARIES_COMPILER_ARGS} + -P ${LATEX_USE_LATEX_LOCATION} + COMMAND ${CMAKE_COMMAND} -E chdir ${output_dir} + ${pdflatex_build_command} + ) + endforeach(dummy) + endif() + + if(LATEX_USE_NOMENCL) + foreach(dummy 0 1) # Repeat these commands twice. + set(make_dvi_command ${make_dvi_command} + COMMAND ${CMAKE_COMMAND} -E chdir ${output_dir} + ${CMAKE_COMMAND} + -D LATEX_BUILD_COMMAND=makenomenclature + -D LATEX_TARGET=${LATEX_TARGET} + -D MAKEINDEX_COMPILER=${MAKEINDEX_COMPILER} + -D MAKENOMENCLATURE_COMPILER_ARGS=${MAKENOMENCLATURE_COMPILER_ARGS} + -P ${LATEX_USE_LATEX_LOCATION} + COMMAND ${CMAKE_COMMAND} -E chdir ${output_dir} + ${latex_build_command} + ) + set(make_pdf_command ${make_pdf_command} + COMMAND ${CMAKE_COMMAND} -E chdir ${output_dir} + ${CMAKE_COMMAND} + -D LATEX_BUILD_COMMAND=makenomenclature + -D LATEX_TARGET=${LATEX_TARGET} + -D MAKEINDEX_COMPILER=${MAKEINDEX_COMPILER} + -D MAKENOMENCLATURE_COMPILER_ARGS=${MAKENOMENCLATURE_COMPILER_ARGS} + -P ${LATEX_USE_LATEX_LOCATION} + COMMAND ${CMAKE_COMMAND} -E chdir ${output_dir} + ${pdflatex_build_command} + ) + endforeach(dummy) + endif() + + if(LATEX_BIBFILES) + if(LATEX_USE_BIBLATEX) + if(NOT BIBER_COMPILER) + message(SEND_ERROR "I need the biber command.") + endif() + set(bib_compiler ${BIBER_COMPILER}) + set(bib_compiler_flags ${BIBER_COMPILER_ARGS}) + else() + set(bib_compiler ${BIBTEX_COMPILER}) + set(bib_compiler_flags ${BIBTEX_COMPILER_ARGS}) + endif() + if(LATEX_MULTIBIB_NEWCITES) + foreach (multibib_auxfile ${LATEX_MULTIBIB_NEWCITES}) + latex_get_filename_component(multibib_target ${multibib_auxfile} NAME_WE) + set(make_dvi_command ${make_dvi_command} + COMMAND ${CMAKE_COMMAND} -E chdir ${output_dir} + ${bib_compiler} ${bib_compiler_flags} ${multibib_target}) + set(make_pdf_command ${make_pdf_command} + COMMAND ${CMAKE_COMMAND} -E chdir ${output_dir} + ${bib_compiler} ${bib_compiler_flags} ${multibib_target}) + set(auxiliary_clean_files ${auxiliary_clean_files} + ${output_dir}/${multibib_target}.aux) + endforeach (multibib_auxfile ${LATEX_MULTIBIB_NEWCITES}) + else() + set(make_dvi_command ${make_dvi_command} + COMMAND ${CMAKE_COMMAND} -E chdir ${output_dir} + ${bib_compiler} ${bib_compiler_flags} ${LATEX_TARGET}) + set(make_pdf_command ${make_pdf_command} + COMMAND ${CMAKE_COMMAND} -E chdir ${output_dir} + ${bib_compiler} ${bib_compiler_flags} ${LATEX_TARGET}) + endif() + + foreach (bibfile ${LATEX_BIBFILES}) + list(APPEND make_dvi_depends ${output_dir}/${bibfile}) + list(APPEND make_pdf_depends ${output_dir}/${bibfile}) + endforeach (bibfile ${LATEX_BIBFILES}) + else() + if(LATEX_MULTIBIB_NEWCITES) + message(WARNING "MULTIBIB_NEWCITES has no effect without BIBFILES option.") + endif() + endif() + + if(LATEX_USE_INDEX) + if(LATEX_INDEX_NAMES) + set(INDEX_NAMES 
${LATEX_INDEX_NAMES})
+    else()
+      set(INDEX_NAMES ${LATEX_TARGET})
+    endif()
+    foreach(idx_name ${INDEX_NAMES})
+      set(make_dvi_command ${make_dvi_command}
+        COMMAND ${CMAKE_COMMAND} -E chdir ${output_dir}
+        ${latex_build_command}
+        COMMAND ${CMAKE_COMMAND} -E chdir ${output_dir}
+        ${MAKEINDEX_COMPILER} ${MAKEINDEX_COMPILER_ARGS} ${idx_name}.idx)
+      set(make_pdf_command ${make_pdf_command}
+        COMMAND ${CMAKE_COMMAND} -E chdir ${output_dir}
+        ${pdflatex_build_command}
+        COMMAND ${CMAKE_COMMAND} -E chdir ${output_dir}
+        ${MAKEINDEX_COMPILER} ${MAKEINDEX_COMPILER_ARGS} ${idx_name}.idx)
+      set(auxiliary_clean_files ${auxiliary_clean_files}
+        ${output_dir}/${idx_name}.idx
+        ${output_dir}/${idx_name}.ilg
+        ${output_dir}/${idx_name}.ind)
+    endforeach()
+  else()
+    if(LATEX_INDEX_NAMES)
+      message(WARNING "INDEX_NAMES has no effect without USE_INDEX option.")
+    endif()
+  endif()
+
+  set(make_dvi_command ${make_dvi_command}
+    COMMAND ${CMAKE_COMMAND} -E chdir ${output_dir}
+    ${latex_build_command}
+    COMMAND ${CMAKE_COMMAND} -E chdir ${output_dir}
+    ${latex_build_command})
+  set(make_pdf_command ${make_pdf_command}
+    COMMAND ${CMAKE_COMMAND} -E chdir ${output_dir}
+    ${pdflatex_build_command}
+    COMMAND ${CMAKE_COMMAND} -E chdir ${output_dir}
+    ${pdflatex_build_command})
+
+  # Need to run one more time to remove biblatex's warning
+  # about page breaks that have changed.
+  if(LATEX_USE_BIBLATEX)
+    set(make_dvi_command ${make_dvi_command}
+      COMMAND ${CMAKE_COMMAND} -E chdir ${output_dir}
+      ${latex_build_command})
+    set(make_pdf_command ${make_pdf_command}
+      COMMAND ${CMAKE_COMMAND} -E chdir ${output_dir}
+      ${pdflatex_build_command})
+  endif()
+
+  if(LATEX_USE_SYNCTEX)
+    if(NOT GZIP)
+      message(SEND_ERROR "UseLATEX.cmake: USE_SYNCTEX option requires gzip program. Set GZIP variable.")
+    endif()
+    set(make_dvi_command ${make_dvi_command}
+      COMMAND ${CMAKE_COMMAND}
+      -D LATEX_BUILD_COMMAND=correct_synctex
+      -D LATEX_TARGET=${LATEX_TARGET}
+      -D GZIP=${GZIP}
+      -D "LATEX_SOURCE_DIRECTORY=${CMAKE_CURRENT_SOURCE_DIR}"
+      -D "LATEX_BINARY_DIRECTORY=${output_dir}"
+      -P ${LATEX_USE_LATEX_LOCATION}
+      )
+    set(make_pdf_command ${make_pdf_command}
+      COMMAND ${CMAKE_COMMAND}
+      -D LATEX_BUILD_COMMAND=correct_synctex
+      -D LATEX_TARGET=${LATEX_TARGET}
+      -D GZIP=${GZIP}
+      -D "LATEX_SOURCE_DIRECTORY=${CMAKE_CURRENT_SOURCE_DIR}"
+      -D "LATEX_BINARY_DIRECTORY=${output_dir}"
+      -P ${LATEX_USE_LATEX_LOCATION}
+      )
+  endif()
+
+  # Check LaTeX output for important warnings at end of build
+  set(make_dvi_command ${make_dvi_command}
+    COMMAND ${CMAKE_COMMAND} -E chdir ${output_dir}
+    ${CMAKE_COMMAND}
+    -D LATEX_BUILD_COMMAND=check_important_warnings
+    -D LATEX_TARGET=${LATEX_TARGET}
+    -P ${LATEX_USE_LATEX_LOCATION}
+    )
+  set(make_pdf_command ${make_pdf_command}
+    COMMAND ${CMAKE_COMMAND} -E chdir ${output_dir}
+    ${CMAKE_COMMAND}
+    -D LATEX_BUILD_COMMAND=check_important_warnings
+    -D LATEX_TARGET=${LATEX_TARGET}
+    -P ${LATEX_USE_LATEX_LOCATION}
+    )
+
+  # Capture the default build.
+  string(TOLOWER "${LATEX_DEFAULT_BUILD}" default_build)
+
+  if((NOT LATEX_FORCE_PDF) AND (NOT LATEX_FORCE_DVI) AND (NOT LATEX_FORCE_HTML))
+    set(no_force TRUE)
+  endif()
+
+  # Add commands and targets for building pdf outputs (with pdflatex).
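+  # By this point make_pdf_command is one long chain of COMMAND clauses:
+  # pdflatex, then bibtex/biber, makeindex, makeglossaries and friends as
+  # requested, then two (three with biblatex) more pdflatex passes, and
+  # finally the important-warnings check. The single add_custom_command
+  # below therefore reproduces the classic latex/bibtex/latex/latex cycle
+  # in one build step.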
+ if(LATEX_FORCE_PDF OR no_force) + if(LATEX_FORCE_PDF) + set(default_build pdf) + endif() + + if(PDFLATEX_COMPILER) + add_custom_command(OUTPUT ${output_dir}/${LATEX_TARGET}.pdf + COMMAND ${make_pdf_command} + DEPENDS ${make_pdf_depends} + ) + add_custom_target(${pdf_target} + DEPENDS ${output_dir}/${LATEX_TARGET}.pdf + SOURCES ${all_latex_sources} + ) + if(NOT LATEX_EXCLUDE_FROM_DEFAULTS) + add_dependencies(pdf ${pdf_target}) + endif() + endif() + endif() + + # Add commands and targets for building dvi outputs. + if(LATEX_FORCE_DVI OR LATEX_FORCE_HTML OR no_force) + if(LATEX_FORCE_DVI) + if((NOT default_build STREQUAL dvi) AND + (NOT default_build STREQUAL ps) AND + (NOT default_build STREQUAL safepdf)) + set(default_build dvi) + endif() + endif() + + add_custom_command(OUTPUT ${output_dir}/${LATEX_TARGET}.dvi + COMMAND ${make_dvi_command} + DEPENDS ${make_dvi_depends} + ) + add_custom_target(${dvi_target} + DEPENDS ${output_dir}/${LATEX_TARGET}.dvi + SOURCES ${all_latex_sources} + ) + if(NOT LATEX_EXCLUDE_FROM_DEFAULTS) + add_dependencies(dvi ${dvi_target}) + endif() + + if(DVIPS_CONVERTER) + add_custom_command(OUTPUT ${output_dir}/${LATEX_TARGET}.ps + COMMAND ${CMAKE_COMMAND} -E chdir ${output_dir} + ${DVIPS_CONVERTER} ${DVIPS_CONVERTER_ARGS} -o ${LATEX_TARGET}.ps ${LATEX_TARGET}.dvi + DEPENDS ${output_dir}/${LATEX_TARGET}.dvi) + add_custom_target(${ps_target} + DEPENDS ${output_dir}/${LATEX_TARGET}.ps + SOURCES ${all_latex_sources} + ) + if(NOT LATEX_EXCLUDE_FROM_DEFAULTS) + add_dependencies(ps ${ps_target}) + endif() + if(PS2PDF_CONVERTER) + # Since both the pdf and safepdf targets have the same output, we + # cannot properly do the dependencies for both. When selecting safepdf, + # simply force a recompile every time. + add_custom_target(${safepdf_target} + ${CMAKE_COMMAND} -E chdir ${output_dir} + ${PS2PDF_CONVERTER} ${PS2PDF_CONVERTER_ARGS} ${LATEX_TARGET}.ps ${LATEX_TARGET}.pdf + DEPENDS ${ps_target} + ) + if(NOT LATEX_EXCLUDE_FROM_DEFAULTS) + add_dependencies(safepdf ${safepdf_target}) + endif() + endif() + endif() + endif() + + if(LATEX_FORCE_HTML OR no_force) + if (LATEX_FORCE_HTML) + set(default_build html) + endif() + + if(HTLATEX_COMPILER AND LATEX_MAIN_INPUT_SUBDIR) + message(STATUS + "Disabling HTML build for ${LATEX_TARGET_NAME}.tex because the main file is in subdirectory ${LATEX_MAIN_INPUT_SUBDIR}" + ) + # The code below to run HTML assumes that LATEX_TARGET.tex is in the + # current directory. I have tried to specify that LATEX_TARGET.tex is + # in a subdirectory. That makes the build targets correct, but the + # HTML build still fails (at least for htlatex) because files are not + # generated where expected. I am getting around the problem by simply + # disabling HTML in this case. If someone really cares, they can fix + # this, but make sure it runs on many platforms and build programs. 
+ elseif(HTLATEX_COMPILER) + # htlatex places the output in a different location + set(HTML_OUTPUT "${output_dir}/${LATEX_TARGET}.html") + add_custom_command(OUTPUT ${HTML_OUTPUT} + COMMAND ${CMAKE_COMMAND} -E chdir ${output_dir} + ${HTLATEX_COMPILER} ${LATEX_MAIN_INPUT} + "${HTLATEX_COMPILER_TEX4HT_FLAGS}" + "${HTLATEX_COMPILER_TEX4HT_POSTPROCESSOR_FLAGS}" + "${HTLATEX_COMPILER_T4HT_POSTPROCESSOR_FLAGS}" + ${HTLATEX_COMPILER_ARGS} + DEPENDS + ${output_dir}/${LATEX_TARGET}.tex + ${output_dir}/${LATEX_TARGET}.dvi + VERBATIM + ) + add_custom_target(${html_target} + DEPENDS ${HTML_OUTPUT} ${dvi_target} + SOURCES ${all_latex_sources} + ) + if(NOT LATEX_EXCLUDE_FROM_DEFAULTS) + add_dependencies(html ${html_target}) + endif() + endif() + endif() + + # Set default targets. + if("${default_build}" STREQUAL "pdf") + add_custom_target(${LATEX_TARGET_NAME} DEPENDS ${pdf_target}) + elseif("${default_build}" STREQUAL "dvi") + add_custom_target(${LATEX_TARGET_NAME} DEPENDS ${dvi_target}) + elseif("${default_build}" STREQUAL "ps") + add_custom_target(${LATEX_TARGET_NAME} DEPENDS ${ps_target}) + elseif("${default_build}" STREQUAL "safepdf") + add_custom_target(${LATEX_TARGET_NAME} DEPENDS ${safepdf_target}) + elseif("${default_build}" STREQUAL "html") + add_custom_target(${LATEX_TARGET_NAME} DEPENDS ${html_target}) + else() + message(SEND_ERROR "LATEX_DEFAULT_BUILD set to an invalid value. See the documentation for that variable.") + endif() + + if(NOT LATEX_EXCLUDE_FROM_ALL) + add_custom_target(_${LATEX_TARGET_NAME} ALL DEPENDS ${LATEX_TARGET_NAME}) + endif() + + set_directory_properties(. + ADDITIONAL_MAKE_CLEAN_FILES "${auxiliary_clean_files}" + ) + + add_custom_target(${auxclean_target} + COMMENT "Cleaning auxiliary LaTeX files." + COMMAND ${CMAKE_COMMAND} -E remove ${auxiliary_clean_files} + ) + add_dependencies(auxclean ${auxclean_target}) +endfunction(add_latex_targets_internal) + +function(add_latex_targets latex_main_input) + latex_get_output_path(output_dir) + parse_add_latex_arguments(ADD_LATEX_TARGETS ${latex_main_input} ${ARGN}) + + add_latex_targets_internal() +endfunction(add_latex_targets) + +function(add_latex_document latex_main_input) + latex_get_output_path(output_dir) + if(output_dir) + parse_add_latex_arguments(add_latex_document ${latex_main_input} ${ARGN}) + + latex_copy_input_file(${LATEX_MAIN_INPUT}) + + foreach (bib_file ${LATEX_BIBFILES}) + latex_copy_input_file(${bib_file}) + endforeach (bib_file) + + foreach (input ${LATEX_INPUTS}) + latex_copy_input_file(${input}) + endforeach(input) + + latex_copy_globbed_files(${CMAKE_CURRENT_SOURCE_DIR}/*.cls ${output_dir}) + latex_copy_globbed_files(${CMAKE_CURRENT_SOURCE_DIR}/*.bst ${output_dir}) + latex_copy_globbed_files(${CMAKE_CURRENT_SOURCE_DIR}/*.clo ${output_dir}) + latex_copy_globbed_files(${CMAKE_CURRENT_SOURCE_DIR}/*.sty ${output_dir}) + latex_copy_globbed_files(${CMAKE_CURRENT_SOURCE_DIR}/*.ist ${output_dir}) + latex_copy_globbed_files(${CMAKE_CURRENT_SOURCE_DIR}/*.fd ${output_dir}) + + add_latex_targets_internal() + endif() +endfunction(add_latex_document) + +############################################################################# +# Actually do stuff +############################################################################# + +if(LATEX_BUILD_COMMAND) + set(command_handled) + + if("${LATEX_BUILD_COMMAND}" STREQUAL execute_latex) + latex_execute_latex() + set(command_handled TRUE) + endif() + + if("${LATEX_BUILD_COMMAND}" STREQUAL makeglossaries) + latex_makeglossaries() + set(command_handled TRUE) + endif() + + 
if("${LATEX_BUILD_COMMAND}" STREQUAL makenomenclature) + latex_makenomenclature() + set(command_handled TRUE) + endif() + + if("${LATEX_BUILD_COMMAND}" STREQUAL correct_synctex) + latex_correct_synctex() + set(command_handled TRUE) + endif() + + if("${LATEX_BUILD_COMMAND}" STREQUAL check_important_warnings) + latex_check_important_warnings() + set(command_handled TRUE) + endif() + + if(NOT command_handled) + message(SEND_ERROR "Unknown command: ${LATEX_BUILD_COMMAND}") + endif() + +else() + # Must be part of the actual configure (included from CMakeLists.txt). + latex_setup_variables() + latex_setup_targets() +endif() diff --git a/ton-test-liteclient-full/lite-client-with-webserver/CMakeLists.txt b/ton-test-liteclient-full/lite-client-with-webserver/CMakeLists.txt new file mode 100644 index 0000000..5a42d98 --- /dev/null +++ b/ton-test-liteclient-full/lite-client-with-webserver/CMakeLists.txt @@ -0,0 +1,167 @@ +cmake_minimum_required(VERSION 3.0.2) + +project(LITE_CLIENT) + +add_subdirectory(lite-client/third-party/abseil-cpp EXCLUDE_FROM_ALL) + +set(CRC32C_BUILD_TESTS OFF CACHE BOOL "Build CRC32C's unit tests") +set(CRC32C_BUILD_BENCHMARKS OFF CACHE BOOL "Build CRC32C's benchmarks") +set(CRC32C_USE_GLOG OFF CACHE BOOL "Build CRC32C's tests with Google Logging") +set(CRC32C_INSTALL OFF CACHE BOOL "Install CRC32C's header and library") +add_subdirectory(lite-client/third-party/crc32c EXCLUDE_FROM_ALL) +set(CRC32C_FOUND 1) +set(HAVE_SSE42) + +option(USE_LIBRAPTORQ "use libraptorq for tests" OFF) +if (USE_LIBRAPTORQ) + set(USE_LZ4 OFF CACHE BOOL "use lz4") + if (${CMAKE_CXX_COMPILER_ID} MATCHES Clang) + set(CLANG_STDLIB "ON") # for libraptorq + endif() + add_subdirectory(lite-client/third-party/libraptorq EXCLUDE_FROM_ALL) +endif() + +set(CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/CMake" ${CMAKE_MODULE_PATH}) + +set(CMAKE_POSITION_INDEPENDENT_CODE ON) + +# Configure CCache if available +find_program(CCACHE_FOUND ccache) +#set(CCACHE_FOUND 0) +if (CCACHE_FOUND) + message(STATUS "Found ccache") + set_property(GLOBAL PROPERTY RULE_LAUNCH_COMPILE ccache) + set_property(GLOBAL PROPERTY RULE_LAUNCH_LINK ccache) +else() + message(STATUS "Could NOT find ccache") +endif() + +if (${CMAKE_CXX_COMPILER_ID} STREQUAL GNU) + set(GCC 1) +elseif (${CMAKE_CXX_COMPILER_ID} MATCHES Clang) + set(CLANG 1) +elseif (${CMAKE_CXX_COMPILER_ID} STREQUAL Intel) + set(INTEL 1) +elseif (NOT MSVC) + message(FATAL_ERROR "Compiler isn't supported") +endif() + +include(CheckCXXCompilerFlag) + +if (GCC OR CLANG OR INTEL) + if (WIN32 AND INTEL) + set(STD14_FLAG /Qstd=c++14) + else() + set(STD14_FLAG -std=c++14) + endif() + check_cxx_compiler_flag(${STD14_FLAG} HAVE_STD14) + if (NOT HAVE_STD14) + string(REPLACE "c++14" "c++1y" STD14_FLAG "${STD14_FLAG}") + check_cxx_compiler_flag(${STD14_FLAG} HAVE_STD1Y) + set(HAVE_STD14 ${HAVE_STD1Y}) + endif() +elseif (MSVC) + set(HAVE_STD14 MSVC_VERSION>=1900) +endif() + +if (NOT HAVE_STD14) + message(FATAL_ERROR "No C++14 support in the compiler. 
Please upgrade the compiler.") +endif() + +set(CMAKE_THREAD_PREFER_PTHREAD ON) +set(THREADS_PREFER_PTHREAD_FLAG ON) +find_package(Threads REQUIRED) + +if (NOT MSVC) + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -march=core2") +endif() +if (THREADS_HAVE_PTHREAD_ARG) + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -pthread") +endif() + +if (MSVC) + if (CMAKE_CXX_FLAGS_DEBUG MATCHES "/RTC1") + string(REPLACE "/RTC1" " " CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG}") + endif() + add_definitions(-D_SCL_SECURE_NO_WARNINGS -D_CRT_SECURE_NO_WARNINGS) + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /W4 /wd4100 /wd4127 /wd4324 /wd4505 /wd4702") +elseif (CLANG OR GCC) + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${STD14_FLAG} -fno-omit-frame-pointer") + if (APPLE) + #use "-Wl,-exported_symbols_list,${CMAKE_CURRENT_SOURCE_DIR}/export_list" for exported symbols + set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -fvisibility=hidden -Wl,-dead_strip,-x,-S") + else() + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -ffunction-sections -fdata-sections") + set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -Wl,--gc-sections -Wl,--exclude-libs,ALL") + endif() +elseif (INTEL) + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${STD14_FLAG}") +endif() + +if (WIN32) + add_definitions(-DNTDDI_VERSION=0x06020000 -DWINVER=0x0602 -D_WIN32_WINNT=0x0602 -DNOMINMAX -DUNICODE -D_UNICODE) +endif() +if (CYGWIN) + add_definitions(-D_DEFAULT_SOURCE=1 -DFD_SETSIZE=4096) +endif() + +if (NOT ANDROID) # _FILE_OFFSET_BITS is broken in ndk r15 and r15b and doesn't work prior to Android 7.0 + add_definitions(-D_FILE_OFFSET_BITS=64) +endif() + +include(AddCXXCompilerFlag) +if (NOT MSVC) + add_cxx_compiler_flag("-Wall") +endif() +add_cxx_compiler_flag("-Wextra") +add_cxx_compiler_flag("-Wimplicit-fallthrough=2") +add_cxx_compiler_flag("-Wpointer-arith") +add_cxx_compiler_flag("-Wcast-qual") +add_cxx_compiler_flag("-Wsign-compare") +add_cxx_compiler_flag("-Wduplicated-branches") +add_cxx_compiler_flag("-Wduplicated-cond") +add_cxx_compiler_flag("-Walloc-zero") +add_cxx_compiler_flag("-Wlogical-op") +add_cxx_compiler_flag("-Wno-tautological-compare") +add_cxx_compiler_flag("-Wpointer-arith") +add_cxx_compiler_flag("-Wvla") +add_cxx_compiler_flag("-Wnon-virtual-dtor") +add_cxx_compiler_flag("-Wno-unused-parameter") +add_cxx_compiler_flag("-Wconversion") +add_cxx_compiler_flag("-Wno-sign-conversion") +add_cxx_compiler_flag("-Qunused-arguments") +add_cxx_compiler_flag("-Wno-unused-private-field") + +#Compilation database +set(CMAKE_EXPORT_COMPILE_COMMANDS 1) + +function(target_link_libraries_system target) + set(libs ${ARGN}) + foreach(lib ${libs}) + get_target_property(lib_include_dirs ${lib} INTERFACE_INCLUDE_DIRECTORIES) + target_include_directories(${target} SYSTEM PUBLIC ${lib_include_dirs}) + target_link_libraries(${target} PUBLIC ${lib}) + endforeach(lib) +endfunction(target_link_libraries_system) + +add_subdirectory(lite-client/tdutils) +add_subdirectory(lite-client/tdactor) +add_subdirectory(lite-client/tdnet) +add_subdirectory(lite-client/tddb) +add_subdirectory(lite-client/tdtl) +add_subdirectory(lite-client/tl) +if (USE_LIBRAPTORQ) + target_link_libraries(test-fec PRIVATE third_party_fec) + target_compile_definitions(test-fec PRIVATE "USE_LIBRAPTORQ=1") +endif() + +add_subdirectory(lite-client/terminal) +add_subdirectory(lite-client/keys) +add_subdirectory(lite-client/tl-utils) +add_subdirectory(lite-client/adnl) + +add_subdirectory(lite-client/crypto) + +add_executable (test-lite-client-with-webserver src/lite-client.cpp) 
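+# Note that src/lite-client.cpp is deliberately the only translation unit of
+# this executable: it #includes the web_server/*.cpp sources directly (see the
+# top of that file), so no separate web-server library target is linked here.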
+target_link_libraries(test-lite-client-with-webserver tdutils tdactor adnllite tl_api
+  ton_crypto ton_block terminal)
diff --git a/ton-test-liteclient-full/lite-client-with-webserver/lite-client b/ton-test-liteclient-full/lite-client-with-webserver/lite-client
new file mode 120000
index 0000000..c25c7f8
--- /dev/null
+++ b/ton-test-liteclient-full/lite-client-with-webserver/lite-client
@@ -0,0 +1 @@
+../lite-client
\ No newline at end of file
diff --git a/ton-test-liteclient-full/lite-client-with-webserver/src/lite-client.cpp b/ton-test-liteclient-full/lite-client-with-webserver/src/lite-client.cpp
new file mode 100644
index 0000000..c2b5041
--- /dev/null
+++ b/ton-test-liteclient-full/lite-client-with-webserver/src/lite-client.cpp
@@ -0,0 +1,1696 @@
+#include "web_server/lite-client-test-node.cpp"
+#include "adnl/adnl-ext-client.h"
+#include "tl-utils/tl-utils.hpp"
+#include "auto/tl/ton_api_json.h"
+#include "td/utils/OptionsParser.h"
+#include "td/utils/Time.h"
+#include "td/utils/filesystem.h"
+#include "td/utils/format.h"
+#include "td/utils/Random.h"
+#include "td/utils/crypto.h"
+#include "td/utils/port/signals.h"
+#include "td/utils/port/stacktrace.h"
+#include "td/utils/port/StdStreams.h"
+#include "td/utils/port/FileFd.h"
+#include "terminal/terminal.h"
+#include "ton/ton-tl.hpp"
+#include "block/block-db.h"
+#include "block/block.h"
+#include "block/block-auto.h"
+#include "block/mc-config.h"
+#include "vm/boc.h"
+#include "vm/cellops.h"
+#include "vm/cells/MerkleProof.h"
+#include "ton/ton-shard.h"
+
+#if TD_DARWIN || TD_LINUX
+#include <unistd.h>
+#include <fcntl.h>
+#endif
+#include <iostream>
+#include <sstream>
+
+#include "web_server/lite-client-test-node-helpers.cpp"
+#include "web_server/lite-client-web-server.cpp"
+#include "web_server/include/method-time.cpp"
+#include "web_server/include/method-getaccount.cpp"
+#include "web_server/include/method-getblock.cpp"
+#include "web_server/include/method-last.cpp"
+
+using td::Ref;
+
+int verbosity;
+
+template <size_t size>
+std::ostream& operator<<(std::ostream& stream, const td::UInt<size>& x) {
+  for (size_t i = 0; i < size / 8; i++) {
+    stream << td::format::hex_digit((x.raw[i] >> 4) & 15) << td::format::hex_digit(x.raw[i] & 15);
+  }
+
+  return stream;
+}
+
+std::unique_ptr<ton::AdnlExtClient::Callback> TestNode::make_callback() {
+  class Callback : public ton::AdnlExtClient::Callback {
+   public:
+    void on_ready() override {
+      td::actor::send_closure(id_, &TestNode::conn_ready);
+    }
+    void on_stop_ready() override {
+      td::actor::send_closure(id_, &TestNode::conn_closed);
+    }
+    Callback(td::actor::ActorId<TestNode> id) : id_(std::move(id)) {
+    }
+
+   private:
+    td::actor::ActorId<TestNode> id_;
+  };
+  return std::make_unique<Callback>(actor_id(this));
+}
+
+void TestNode::run() {
+  class Cb : public td::TerminalIO::Callback {
+   public:
+    void line_cb(td::BufferSlice line) override {
+      td::actor::send_closure(id_, &TestNode::parse_line, std::move(line));
+    }
+    Cb(td::actor::ActorId<TestNode> id) : id_(id) {
+    }
+
+   private:
+    td::actor::ActorId<TestNode> id_;
+  };
+  io_ = td::TerminalIO::create("> ", readline_enabled_, std::make_unique<Cb>(actor_id(this)));
+  td::actor::send_closure(io_, &td::TerminalIO::set_log_interface);
+
+  auto G = td::read_file(global_config_).move_as_ok();
+  auto gc_j = td::json_decode(G.as_slice()).move_as_ok();
+  ton::ton_api::config_global gc;
+  ton::ton_api::from_json(gc, gc_j.get_object()).ensure();
+
+  CHECK(gc.liteclients_.size() > 0);
+  auto idx =
+      liteserver_idx_ >= 0 ?
liteserver_idx_ : td::Random::fast(0, static_cast(gc.liteclients_.size() - 1)); + CHECK(idx >= 0 && static_cast(idx) <= gc.liteclients_.size()); + auto& cli = gc.liteclients_[idx]; + td::IPAddress addr; + addr.init_host_port(td::IPAddress::ipv4_to_str(cli->ip_), cli->port_).ensure(); + td::TerminalIO::out() << "using liteserver " << idx << " with addr " << addr << "\n"; + + client_ = ton::AdnlExtClient::create(ton::AdnlNodeIdFull{cli->id_}, addr, make_callback()); +} + +bool TestNode::envelope_send_query(td::BufferSlice query, td::Promise promise) { + if (!ready_ || client_.empty()) { + LOG(ERROR) << "failed to send query to server: not ready"; + return false; + } + auto P = td::PromiseCreator::lambda([promise = std::move(promise)](td::Result R) mutable { + if (R.is_error()) { + auto err = R.move_as_error(); + LOG(ERROR) << "failed query: " << err; + promise.set_error(std::move(err)); + return; + } + auto data = R.move_as_ok(); + auto F = ton::fetch_tl_object(data.clone(), true); + if (F.is_ok()) { + auto f = F.move_as_ok(); + auto err = td::Status::Error(f->code_, f->message_); + LOG(ERROR) << "received error: " << err; + promise.set_error(std::move(err)); + return; + } + promise.set_result(std::move(data)); + }); + td::BufferSlice b = + ton::serialize_tl_object(ton::create_tl_object(std::move(query)), true); + td::actor::send_closure(client_, &ton::AdnlExtClient::send_query, "query", std::move(b), td::Timestamp::in(10.0), + std::move(P)); + return true; +} + +bool TestNode::register_blkid(const ton::BlockIdExt& blkid) { + for (const auto& id : known_blk_ids_) { + if (id == blkid) { + return false; + } + } + known_blk_ids_.push_back(blkid); + return true; +} + +bool TestNode::show_new_blkids(bool all) { + if (all) { + shown_blk_ids_ = 0; + } + int cnt = 0; + while (shown_blk_ids_ < known_blk_ids_.size()) { + td::TerminalIO::out() << "BLK#" << shown_blk_ids_ + 1 << " = " << known_blk_ids_[shown_blk_ids_].to_str() + << std::endl; + ++shown_blk_ids_; + ++cnt; + } + return cnt; +} + +bool TestNode::complete_blkid(ton::BlockId partial_blkid, ton::BlockIdExt& complete_blkid) const { + auto n = known_blk_ids_.size(); + while (n) { + --n; + if (known_blk_ids_[n].id == partial_blkid) { + complete_blkid = known_blk_ids_[n]; + return true; + } + } + if (partial_blkid.is_masterchain() && partial_blkid.seqno == ~0U) { + complete_blkid.id = ton::BlockId{ton::masterchainId, ton::shardIdAll, ~0U}; + complete_blkid.root_hash.set_zero(); + complete_blkid.file_hash.set_zero(); + return true; + } + return false; +} + +bool TestNode::get_server_time() { + auto b = ton::serialize_tl_object(ton::create_tl_object(), true); + return envelope_send_query(std::move(b), [&, Self = actor_id(this) ](td::Result res)->void { + if (res.is_error()) { + LOG(ERROR) << "cannot get server time"; + return; + } else { + auto F = ton::fetch_tl_object(res.move_as_ok(), true); + if (F.is_error()) { + LOG(ERROR) << "cannot parse answer to liteServer.getTime"; + } else { + server_time_ = F.move_as_ok()->now_; + server_time_got_at_ = static_cast(td::Clocks::system()); + LOG(INFO) << "server time is " << server_time_; + } + } + }); +} + +bool TestNode::get_server_mc_block_id() { + auto b = ton::serialize_tl_object(ton::create_tl_object(), true); + return envelope_send_query(std::move(b), [Self = actor_id(this)](td::Result res)->void { + if (res.is_error()) { + LOG(ERROR) << "cannot get masterchain info from server"; + return; + } else { + auto F = ton::fetch_tl_object(res.move_as_ok(), true); + if (F.is_error()) { + LOG(ERROR) << 
"cannot parse answer to liteServer.getMasterchainInfo"; + } else { + auto f = F.move_as_ok(); + auto blk_id = create_block_id(f->last_); + auto zstate_id = create_zero_state_id(f->init_); + LOG(INFO) << "last masterchain block is " << blk_id.to_str(); + td::actor::send_closure_later(Self, &TestNode::got_server_mc_block_id, blk_id, zstate_id); + } + } + }); +} + +void TestNode::got_server_mc_block_id(ton::BlockIdExt blkid, ton::ZeroStateIdExt zstateid) { + if (!zstate_id_.is_valid()) { + zstate_id_ = zstateid; + LOG(INFO) << "zerostate id set to " << zstate_id_.to_str(); + } else if (zstate_id_ != zstateid) { + LOG(ERROR) << "fatal: masterchain zero state id suddenly changed: expected " << zstate_id_.to_str() << ", found " + << zstateid.to_str(); + stop(); + return; + } + register_blkid(blkid); + //register_blkid(zstateid); + if (!mc_last_id_.is_valid()) { + mc_last_id_ = blkid; + request_block(blkid); + // request_state(blkid); + } else if (mc_last_id_.id.seqno < blkid.id.seqno) { + mc_last_id_ = blkid; + } +} + +bool TestNode::request_block(ton::BlockIdExt blkid) { + auto b = ton::serialize_tl_object( + ton::create_tl_object(ton::create_tl_block_id(blkid)), true); + return envelope_send_query(std::move(b), [ Self = actor_id(this), blkid ](td::Result res)->void { + if (res.is_error()) { + LOG(ERROR) << "cannot obtain block " << blkid.to_str() << " from server"; + return; + } else { + auto F = ton::fetch_tl_object(res.move_as_ok(), true); + if (F.is_error()) { + LOG(ERROR) << "cannot parse answer to liteServer.getBlock"; + } else { + auto f = F.move_as_ok(); + auto blk_id = ton::create_block_id(f->id_); + LOG(INFO) << "obtained block " << blk_id.to_str() << " from server"; + if (blk_id != blkid) { + LOG(ERROR) << "block id mismatch: expected data for block " << blkid.to_str() << ", obtained for " + << blk_id.to_str(); + } + td::actor::send_closure_later(Self, &TestNode::got_mc_block, blk_id, std::move(f->data_)); + } + } + }); +} + +bool TestNode::request_state(ton::BlockIdExt blkid) { + auto b = ton::serialize_tl_object( + ton::create_tl_object(ton::create_tl_block_id(blkid)), true); + return envelope_send_query(std::move(b), [ Self = actor_id(this), blkid ](td::Result res)->void { + if (res.is_error()) { + LOG(ERROR) << "cannot obtain state " << blkid.to_str() << " from server"; + return; + } else { + auto F = ton::fetch_tl_object(res.move_as_ok(), true); + if (F.is_error()) { + LOG(ERROR) << "cannot parse answer to liteServer.getState"; + } else { + auto f = F.move_as_ok(); + auto blk_id = ton::create_block_id(f->id_); + LOG(INFO) << "obtained state " << blk_id.to_str() << " from server"; + if (blk_id != blkid) { + LOG(ERROR) << "block id mismatch: expected state for block " << blkid.to_str() << ", obtained for " + << blk_id.to_str(); + } + td::actor::send_closure_later(Self, &TestNode::got_mc_state, blk_id, ton::UInt256_2_Bits256(f->root_hash_), + ton::UInt256_2_Bits256(f->file_hash_), std::move(f->data_)); + } + } + }); +} + +void TestNode::got_mc_block(ton::BlockIdExt blkid, td::BufferSlice data) { + LOG(INFO) << "obtained " << data.size() << " data bytes for block " << blkid.to_str(); + ton::FileHash fhash; + td::sha256(data.as_slice(), fhash.as_slice()); + if (fhash != blkid.file_hash) { + LOG(ERROR) << "file hash mismatch for block " << blkid.to_str() << ": expected " << blkid.file_hash.to_hex() + << ", computed " << fhash.to_hex(); + return; + } + register_blkid(blkid); + last_block_id_ = blkid; + last_block_data_ = data.clone(); + if (!db_root_.empty()) { + auto res = 
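+        // save_db_file() (below) first writes the data under a temporary
+        // name and only then rename()s it over the final path, so an
+        // interrupted run cannot leave a truncated block file in the db.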
save_db_file(fhash, std::move(data)); + if (res.is_error()) { + LOG(ERROR) << "error saving block file: " << res.to_string(); + } + } + show_new_blkids(); +} + +void TestNode::got_mc_state(ton::BlockIdExt blkid, ton::RootHash root_hash, ton::FileHash file_hash, + td::BufferSlice data) { + LOG(INFO) << "obtained " << data.size() << " state bytes for block " << blkid.to_str(); + ton::FileHash fhash; + td::sha256(data.as_slice(), fhash.as_slice()); + if (fhash != file_hash) { + LOG(ERROR) << "file hash mismatch for state " << blkid.to_str() << ": expected " << file_hash.to_hex() + << ", computed " << fhash.to_hex(); + return; + } + register_blkid(blkid); + last_state_id_ = blkid; + last_state_data_ = data.clone(); + if (!db_root_.empty()) { + auto res = save_db_file(fhash, std::move(data)); + if (res.is_error()) { + LOG(ERROR) << "error saving state file: " << res.to_string(); + } + } + show_new_blkids(); +} + +td::Status TestNode::save_db_file(ton::FileHash file_hash, td::BufferSlice data) { + std::string fname = block::compute_db_filename(db_root_ + '/', file_hash); + for (int i = 0; i < 10; i++) { + std::string tmp_fname = block::compute_db_tmp_filename(db_root_ + '/', file_hash, i); + auto res = block::save_binary_file(tmp_fname, data); + if (res.is_ok()) { + if (rename(tmp_fname.c_str(), fname.c_str()) < 0) { + int err = errno; + LOG(ERROR) << "cannot rename " << tmp_fname << " to " << fname << " : " << strerror(err); + return td::Status::Error(std::string{"cannot rename file: "} + strerror(err)); + } else { + LOG(INFO) << data.size() << " bytes saved into file " << fname; + return td::Status::OK(); + } + } else if (i == 9) { + return res; + } + } + return td::Status::Error("cannot save data file"); +} + +void TestNode::run_init_queries() { + get_server_time(); + get_server_mc_block_id(); +} + +std::string TestNode::get_word(char delim) { + if (delim == ' ' || !delim) { + skipspc(); + } + const char* ptr = parse_ptr_; + while (ptr < parse_end_ && *ptr != delim && (*ptr != '\t' || delim != ' ')) { + ptr++; + } + std::swap(ptr, parse_ptr_); + return std::string{ptr, parse_ptr_}; +} + +int TestNode::skipspc() { + int i = 0; + while (parse_ptr_ < parse_end_ && (*parse_ptr_ == ' ' || *parse_ptr_ == '\t')) { + i++; + parse_ptr_++; + } + return i; +} + +std::string TestNode::get_line_tail(bool remove_spaces) const { + const char *ptr = parse_ptr_, *end = parse_end_; + if (remove_spaces) { + while (ptr < end && (*ptr == ' ' || *ptr == '\t')) { + ptr++; + } + while (ptr < end && (end[-1] == ' ' || end[-1] == '\t')) { + --end; + } + } + return std::string{ptr, end}; +} + +bool TestNode::eoln() const { + return parse_ptr_ == parse_end_; +} + +bool TestNode::seekeoln() { + skipspc(); + return eoln(); +} + +bool TestNode::parse_account_addr(ton::WorkchainId& wc, ton::StdSmcAddress& addr) { + return block::parse_std_account_addr(get_word(), wc, addr) || set_error("cannot parse account address"); +} + +bool TestNode::parse_uint64(std::string word, td::uint64& val) { + val = ~0ULL; + if (word.empty()) { + return false; + } + const char* ptr = word.c_str(); + char* end = nullptr; + val = strtoull(ptr, &end, 10); + if (end == ptr + word.size()) { + return true; + } else { + val = ~0ULL; + return false; + } +} + +bool TestNode::parse_lt(ton::LogicalTime& lt) { + return parse_uint64(get_word(), lt) || set_error("cannot parse logical time"); +} + +bool TestNode::set_error(td::Status error) { + if (error.is_ok()) { + return true; + } + LOG(ERROR) << "error: " << error.to_string(); + if (error_.is_ok()) { + 
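+    // Keep only the first error raised while processing the current input
+    // line; parse_line() prints error_ once the whole command has been
+    // handled and then resets it to OK.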
error_ = std::move(error); + } + return false; +} + +int TestNode::parse_hex_digit(int c) { + if (c >= '0' && c <= '9') { + return c - '0'; + } + c |= 0x20; + if (c >= 'a' && c <= 'z') { + return c - 'a' + 10; + } + return -1; +} + +bool TestNode::parse_hash(const char* str, ton::Bits256& hash) { + unsigned char* data = hash.data(); + for (int i = 0; i < 32; i++) { + int a = parse_hex_digit(str[2 * i]); + if (a < 0) { + return false; + } + int b = parse_hex_digit(str[2 * i + 1]); + if (b < 0) { + return false; + } + data[i] = (unsigned char)((a << 4) + b); + } + return true; +} + +bool TestNode::parse_block_id_ext(std::string blkid_str, ton::BlockIdExt& blkid, bool allow_incomplete) const { + if (blkid_str.empty()) { + return false; + } + auto fc = blkid_str[0]; + if (fc == 'B' || fc == '#') { + unsigned n = 0; + if (sscanf(blkid_str.c_str(), fc == 'B' ? "BLK#%u" : "#%u", &n) != 1 || !n || n > known_blk_ids_.size()) { + return false; + } + blkid = known_blk_ids_.at(n - 1); + return true; + } + if (blkid_str[0] != '(') { + return false; + } + auto pos = blkid_str.find(')'); + if (pos == std::string::npos || pos >= 38) { + return false; + } + char buffer[40]; + memcpy(buffer, blkid_str.c_str(), pos + 1); + buffer[pos + 1] = 0; + unsigned long long shard; + if (sscanf(buffer, "(%d,%016llx,%u)", &blkid.id.workchain, &shard, &blkid.id.seqno) != 3) { + return false; + } + blkid.id.shard = shard; + if (!blkid.id.is_valid_full()) { + return false; + } + pos++; + if (pos == blkid_str.size()) { + blkid.root_hash.set_zero(); + blkid.file_hash.set_zero(); + return complete_blkid(blkid.id, blkid) || allow_incomplete; + } + return pos + 2 * 65 == blkid_str.size() && blkid_str[pos] == ':' && blkid_str[pos + 65] == ':' && + parse_hash(blkid_str.c_str() + pos + 1, blkid.root_hash) && + parse_hash(blkid_str.c_str() + pos + 66, blkid.file_hash) && blkid.is_valid_full(); +} + +bool TestNode::parse_block_id_ext(ton::BlockIdExt& blk, bool allow_incomplete) { + return parse_block_id_ext(get_word(), blk, allow_incomplete) || set_error("cannot parse BlockIdExt"); +} + +bool TestNode::parse_hash(ton::Bits256& hash) { + auto word = get_word(); + return (!word.empty() && parse_hash(word.c_str(), hash)) || set_error("cannot parse hash"); +} + +bool TestNode::set_error(std::string err_msg) { + return set_error(td::Status::Error(-1, err_msg)); +} + +void TestNode::parse_line(td::BufferSlice data) { + line_ = data.as_slice().str(); + parse_ptr_ = line_.c_str(); + parse_end_ = parse_ptr_ + line_.size(); + error_ = td::Status::OK(); + if (seekeoln()) { + return; + } + if (!do_parse_line() || error_.is_error()) { + show_context(); + LOG(ERROR) << (error_.is_ok() ? "Syntax error" : error_.to_string()); + error_ = td::Status::OK(); + } + show_new_blkids(); +} + +void TestNode::show_context() const { + const char* ptr = line_.c_str(); + CHECK(parse_ptr_ >= ptr && parse_ptr_ <= parse_end_); + auto out = td::TerminalIO::out(); + for (; ptr < parse_ptr_; ptr++) { + out << (char)(*ptr == '\t' ? 
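+        // tabs are echoed through unchanged so that the '^' printed on the
+        // next line ends up exactly under the current parse position of the
+        // offending input, e.g. under the first unparsable word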
*ptr : ' ');
+  }
+  out << "^" << '\n';
+}
+
+bool TestNode::show_help(std::string command) {
+  td::TerminalIO::out()
+      << "list of available commands:\n"
+         "time\tGet server time\n"
+         "last\tGet last block and state info from server\n"
+         "sendfile <filename>\tLoad a serialized message from <filename> and send it to server\n"
+         "status\tShow connection and local database status\n"
+         "getaccount <addr> [<block-id-ext>]\tLoads the most recent state of specified account; <addr> is in "
+         "[<workchain>:]<hex-or-base64-addr> format\n"
+         "allshards [<block-id-ext>]\tShows shard configuration from the most recent masterchain "
+         "state or from masterchain state corresponding to <block-id-ext>\n"
+         "gethead <block-id-ext>\tShows block header for <block-id-ext>\n"
+         "getblock <block-id-ext>\tDownloads block\n"
+         "dumpblock <block-id-ext>\tDownloads and dumps specified block\n"
+         "getstate <block-id-ext>\tDownloads state corresponding to specified block\n"
+         "dumpstate <block-id-ext>\tDownloads and dumps state corresponding to specified block\n"
+         "dumptrans <block-id-ext> <account-id> <trans-lt>\tDumps one transaction of specified account\n"
+         "lasttrans[dump] <account-id> <trans-lt> <trans-hash> [<count>]\tShows or dumps specified transaction and "
+         "several preceding "
+         "ones\n"
+         "known\tShows the list of all known block ids\n"
+         "privkey <filename>\tLoads a private key from file\n"
+         "help [<command>]\tThis help\n"
+         "quit\tExit\n";
+  return true;
+}
+
+bool TestNode::do_parse_line() {
+  ton::WorkchainId workchain = ton::masterchainId;  // change to basechain later
+  ton::StdSmcAddress addr;
+  ton::BlockIdExt blkid;
+  ton::LogicalTime lt;
+  ton::Bits256 hash;
+  std::string word = get_word();
+  skipspc();
+  if (word == "time") {
+    return eoln() && get_server_time();
+  } else if (word == "setverbosity") {
+    return !eoln() && set_error(send_set_verbosity(get_word())) && eoln();
+  } else if (word == "last") {
+    return eoln() && get_server_mc_block_id();
+  } else if (word == "sendfile") {
+    return !eoln() && set_error(send_ext_msg_from_filename(get_line_tail()));
+  } else if (word == "getaccount") {
+    return parse_account_addr(workchain, addr) &&
+           (seekeoln() ? get_account_state(workchain, addr, mc_last_id_)
+                       : parse_block_id_ext(blkid) && seekeoln() && get_account_state(workchain, addr, blkid));
+  } else if (word == "allshards") {
+    return eoln() ?
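+        // Every command handler below follows the same combinator pattern:
+        // the parse_* and get_* helpers all return bool, so chaining them
+        // with && stops at the first failure, whose set_error() call has
+        // already recorded the diagnostic in error_.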
get_all_shards() : (parse_block_id_ext(blkid) && seekeoln() && get_all_shards(false, blkid)); + } else if (word == "getblock") { + return parse_block_id_ext(blkid) && seekeoln() && get_block(blkid, false); + } else if (word == "dumpblock") { + return parse_block_id_ext(blkid) && seekeoln() && get_block(blkid, true); + } else if (word == "getstate") { + return parse_block_id_ext(blkid) && seekeoln() && get_state(blkid, false); + } else if (word == "dumpstate") { + return parse_block_id_ext(blkid) && seekeoln() && get_state(blkid, true); + } else if (word == "gethead") { + return parse_block_id_ext(blkid) && seekeoln() && get_block_header(blkid, 0xffff); + } else if (word == "dumptrans") { + return parse_block_id_ext(blkid) && parse_account_addr(workchain, addr) && parse_lt(lt) && seekeoln() && + get_one_transaction(blkid, workchain, addr, lt, true); + } else if (word == "lasttrans" || word == "lasttransdump") { + return parse_account_addr(workchain, addr) && parse_lt(lt) && parse_hash(hash) && seekeoln() && + get_last_transactions(workchain, addr, lt, hash, 10, word == "lasttransdump"); + } else if (word == "known") { + return eoln() && show_new_blkids(true); + } else if (word == "quit" && eoln()) { + LOG(INFO) << "Exiting"; + stop(); + // exit(0); + return true; + } else if (word == "help") { + return show_help(get_line_tail()); + } else { + td::TerminalIO::out() << "unknown command: " << word << " ; type `help` to get help" << '\n'; + return false; + } +} + +td::Status TestNode::send_ext_msg_from_filename(std::string filename) { + auto F = td::read_file(filename); + if (F.is_error()) { + auto err = F.move_as_error(); + LOG(ERROR) << "failed to read file `" << filename << "`: " << err.to_string(); + return err; + } + if (ready_ && !client_.empty()) { + LOG(ERROR) << "sending query from file " << filename; + auto P = td::PromiseCreator::lambda([](td::Result R) { + if (R.is_error()) { + return; + } + auto F = ton::fetch_tl_object(R.move_as_ok(), true); + if (F.is_error()) { + LOG(ERROR) << "cannot parse answer to liteServer.sendMessage"; + } else { + int status = F.move_as_ok()->status_; + LOG(INFO) << "external message status is " << status; + } + }); + auto b = + ton::serialize_tl_object(ton::create_tl_object(F.move_as_ok()), true); + return envelope_send_query(std::move(b), std::move(P)) ? td::Status::OK() + : td::Status::Error("cannot send query to server"); + } else { + return td::Status::Error("server connection not ready"); + } +} + +td::Status TestNode::send_set_verbosity(std::string verbosity_str) { + auto value = std::stoi(verbosity_str); + if (ready_ && !client_.empty()) { + LOG(ERROR) << "sending set verbosity " << value << " query"; + auto P = td::PromiseCreator::lambda([](td::Result R) { + if (R.is_error()) { + LOG(INFO) << "failed to set verbosity: " << R.move_as_error(); + return; + } + LOG(INFO) << "success set verbosity"; + }); + auto b = ton::serialize_tl_object(ton::create_tl_object( + ton::create_tl_object(value)), + true); + return envelope_send_query(std::move(b), std::move(P)) ? 
td::Status::OK() + : td::Status::Error("cannot send query to server"); + } else { + return td::Status::Error("server connection not ready"); + } +} + +bool TestNode::get_account_state(ton::WorkchainId workchain, ton::StdSmcAddress addr, ton::BlockIdExt ref_blkid) { + if (!ref_blkid.is_valid()) { + return set_error("must obtain last block information before making other queries"); + } + if (!(ready_ && !client_.empty())) { + return set_error("server connection not ready"); + } + auto a = ton::create_tl_object(workchain, ton::Bits256_2_UInt256(addr)); + auto b = ton::serialize_tl_object( + ton::create_tl_object(ton::create_tl_block_id(ref_blkid), std::move(a)), + true); + LOG(INFO) << "requesting account state for " << workchain << ":" << addr.to_hex() << " with respect to " + << ref_blkid.to_str(); + return envelope_send_query( + std::move(b), [ Self = actor_id(this), workchain, addr, ref_blkid ](td::Result R)->void { + if (R.is_error()) { + return; + } + auto F = ton::fetch_tl_object(R.move_as_ok(), true); + if (F.is_error()) { + LOG(ERROR) << "cannot parse answer to liteServer.getAccountState"; + } else { + auto f = F.move_as_ok(); + td::actor::send_closure_later(Self, &TestNode::got_account_state, ref_blkid, ton::create_block_id(f->id_), + ton::create_block_id(f->shardblk_), std::move(f->shard_proof_), + std::move(f->proof_), std::move(f->state_), workchain, addr); + } + }); +} + +bool TestNode::get_one_transaction(ton::BlockIdExt blkid, ton::WorkchainId workchain, ton::StdSmcAddress addr, + ton::LogicalTime lt, bool dump) { + if (!blkid.is_valid_full()) { + return set_error("invalid block id"); + } + if (!ton::shard_contains(blkid.shard_full(), ton::extract_addr_prefix(workchain, addr))) { + return set_error("the shard of this block cannot contain this account"); + } + if (!(ready_ && !client_.empty())) { + return set_error("server connection not ready"); + } + auto a = ton::create_tl_object(workchain, ton::Bits256_2_UInt256(addr)); + auto b = ton::serialize_tl_object(ton::create_tl_object( + ton::create_tl_block_id(blkid), std::move(a), lt), + true); + LOG(INFO) << "requesting transaction " << lt << " of " << workchain << ":" << addr.to_hex() << " from block " + << blkid.to_str(); + return envelope_send_query( + std::move(b), [ Self = actor_id(this), workchain, addr, lt, blkid, dump ](td::Result R)->void { + if (R.is_error()) { + return; + } + auto F = ton::fetch_tl_object(R.move_as_ok(), true); + if (F.is_error()) { + LOG(ERROR) << "cannot parse answer to liteServer.getOneTransaction"; + } else { + auto f = F.move_as_ok(); + td::actor::send_closure_later(Self, &TestNode::got_one_transaction, blkid, ton::create_block_id(f->id_), + std::move(f->proof_), std::move(f->transaction_), workchain, addr, lt, dump); + } + }); +} + +bool TestNode::get_last_transactions(ton::WorkchainId workchain, ton::StdSmcAddress addr, ton::LogicalTime lt, + ton::Bits256 hash, unsigned count, bool dump) { + if (!(ready_ && !client_.empty())) { + return set_error("server connection not ready"); + } + auto a = ton::create_tl_object(workchain, ton::Bits256_2_UInt256(addr)); + auto b = ton::serialize_tl_object(ton::create_tl_object( + count, std::move(a), lt, ton::Bits256_2_UInt256(hash)), + true); + LOG(INFO) << "requesting " << count << " last transactions from " << lt << ":" << hash.to_hex() << " of " << workchain + << ":" << addr.to_hex(); + return envelope_send_query( + std::move(b), [ Self = actor_id(this), workchain, addr, lt, hash, count, dump ](td::Result R) { + if (R.is_error()) { + return; + } + auto F 
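+        // the transaction-list answer carries the block ids (ids_) and the
+        // serialized transactions (transactions_ BoC) as parallel sequences;
+        // got_last_transactions() re-checks that both have the same length
+        // before printing anything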
= ton::fetch_tl_object(R.move_as_ok(), true); + if (F.is_error()) { + LOG(ERROR) << "cannot parse answer to liteServer.getTransactions"; + } else { + auto f = F.move_as_ok(); + std::vector blkids; + for (auto& id : f->ids_) { + blkids.push_back(ton::create_block_id(std::move(id))); + } + td::actor::send_closure_later(Self, &TestNode::got_last_transactions, std::move(blkids), + std::move(f->transactions_), workchain, addr, lt, hash, count, dump); + } + }); +} + +td::Status check_block_header_proof(Ref root, ton::BlockIdExt blkid, + ton::Bits256* store_shard_hash_to = nullptr, bool check_state_hash = false) { + ton::RootHash vhash{root->get_hash().bits()}; + if (vhash != blkid.root_hash) { + return td::Status::Error(PSTRING() << " block header for block " << blkid.to_str() << " has incorrect root hash " + << vhash.to_hex() << " instead of " << blkid.root_hash.to_hex()); + } + std::vector prev; + ton::BlockIdExt mc_blkid, blkid_u = blkid; + bool after_split; + auto res = block::unpack_block_prev_blk_ext(root, blkid_u, prev, mc_blkid, after_split); + if (res.is_error()) { + return res; + } + if (blkid_u.id != blkid.id || blkid_u.root_hash != blkid.root_hash) { + return td::Status::Error(PSTRING() << "block header claims it is for block " << blkid_u.to_str() << " not " + << blkid.to_str()); + } + block::gen::Block::Record blk; + block::gen::BlockInfo::Record info; + if (!(tlb::unpack_cell(root, blk) && tlb::unpack_cell(blk.info, info))) { + return td::Status::Error(std::string{"cannot unpack header for block "} + blkid.to_str()); + } + if (store_shard_hash_to) { + vm::CellSlice upd_cs{vm::NoVmSpec(), blk.state_update}; + if (!(upd_cs.is_special() && upd_cs.prefetch_long(8) == 4 // merkle update + && upd_cs.size_ext() == 0x20228)) { + return td::Status::Error("invalid Merkle update in block header"); + } + auto upd_hash = upd_cs.prefetch_ref(1)->get_hash(0); + if (!check_state_hash) { + *store_shard_hash_to = upd_hash.bits(); + } else if (store_shard_hash_to->compare(upd_hash.bits())) { + return td::Status::Error(PSTRING() << "state hash mismatch in block header of " << blkid.to_str() + << " : header declares " << upd_hash.bits().to_hex(256) << " expected " + << store_shard_hash_to->to_hex()); + } + } + return td::Status::OK(); +} + +void TestNode::got_account_state(ton::BlockIdExt ref_blk, ton::BlockIdExt blk, ton::BlockIdExt shard_blk, + td::BufferSlice shard_proof, td::BufferSlice proof, td::BufferSlice state, + ton::WorkchainId workchain, ton::StdSmcAddress addr) { + LOG(INFO) << "got account state for " << workchain << ":" << addr.to_hex() << " with respect to blocks " + << blk.to_str() << (shard_blk == blk ? 
"" : std::string{" and "} + shard_blk.to_str()); + Ref root; + if (!state.empty()) { + auto R = vm::std_boc_deserialize(state.clone()); + if (R.is_error()) { + LOG(ERROR) << "cannot deserialize account state"; + return; + } + root = R.move_as_ok(); + CHECK(root.not_null()); + } + if (blk != ref_blk && ref_blk.id.seqno != ~0U) { + LOG(ERROR) << "obtained getAccountState() for a different reference block " << blk.to_str() + << " instead of requested " << ref_blk.to_str(); + return; + } + if (!shard_blk.is_valid_full()) { + LOG(ERROR) << "shard block id " << shard_blk.to_str() << " in answer is invalid"; + return; + } + if (!ton::shard_contains(shard_blk.shard_full(), ton::extract_addr_prefix(workchain, addr))) { + LOG(ERROR) << "received data from shard block " << shard_blk.to_str() << " that cannot contain requested account " + << workchain << ":" << addr.to_hex(); + return; + } + if (blk != shard_blk) { + if (!blk.is_masterchain() || !blk.is_valid_full()) { + LOG(ERROR) << "reference block " << blk.to_str() << " for a getAccountState query must belong to the masterchain"; + return; + } + auto P = vm::std_boc_deserialize_multi(std::move(shard_proof)); + if (P.is_error()) { + LOG(ERROR) << "cannot deserialize shard configuration proof"; + return; + } + auto P_roots = P.move_as_ok(); + if (P_roots.size() != 2) { + LOG(ERROR) << "shard configuration proof must have exactly two roots"; + return; + } + try { + auto mc_state_root = vm::MerkleProof::virtualize(std::move(P_roots[1]), 1); + if (mc_state_root.is_null()) { + LOG(ERROR) << "shard configuration proof is invalid"; + return; + } + ton::Bits256 mc_state_hash = mc_state_root->get_hash().bits(); + auto res1 = + check_block_header_proof(vm::MerkleProof::virtualize(std::move(P_roots[0]), 1), blk, &mc_state_hash, true); + if (res1.is_error()) { + LOG(ERROR) << "error in shard configuration block header proof : " << res1.move_as_error().to_string(); + return; + } + block::gen::ShardStateUnsplit::Record sstate; + if (!(tlb::unpack_cell(mc_state_root, sstate))) { + LOG(ERROR) << "cannot unpack masterchain state header"; + return; + } + auto shards_dict = block::Config::extract_shard_hashes_dict(std::move(mc_state_root)); + if (!shards_dict) { + LOG(ERROR) << "cannot extract shard configuration dictionary from proof"; + return; + } + vm::CellSlice cs; + ton::ShardIdFull true_shard; + if (!block::ShardConfig::get_shard_hash_raw_from(*shards_dict, cs, shard_blk.shard_full(), true_shard)) { + LOG(ERROR) << "masterchain state contains no information for shard " << shard_blk.shard_full().to_str(); + return; + } + auto shard_info = block::McShardHash::unpack(cs, true_shard); + if (shard_info.is_null()) { + LOG(ERROR) << "cannot unpack information for shard " << shard_blk.shard_full().to_str() + << " from masterchain state"; + return; + } + if (shard_info->top_block_id() != shard_blk) { + LOG(ERROR) << "shard configuration mismatch: expected to find block " << shard_blk.to_str() << " , found " + << shard_info->top_block_id().to_str(); + return; + } + } catch (vm::VmError err) { + LOG(ERROR) << "error while traversing shard configuration proof : " << err.get_msg(); + return; + } catch (vm::VmVirtError err) { + LOG(ERROR) << "virtualization error while traversing shard configuration proof : " << err.get_msg(); + return; + } + } + auto Q = vm::std_boc_deserialize_multi(std::move(proof)); + if (Q.is_error()) { + LOG(ERROR) << "cannot deserialize account proof"; + return; + } + auto Q_roots = Q.move_as_ok(); + if (Q_roots.size() != 2) { + LOG(ERROR) << 
"account state proof must have exactly two roots"; + return; + } + ton::LogicalTime last_trans_lt = 0; + ton::Bits256 last_trans_hash; + last_trans_hash.set_zero(); + try { + auto state_root = vm::MerkleProof::virtualize(std::move(Q_roots[1]), 1); + if (state_root.is_null()) { + LOG(ERROR) << "account state proof is invalid"; + return; + } + ton::Bits256 state_hash = state_root->get_hash().bits(); + auto res1 = + check_block_header_proof(vm::MerkleProof::virtualize(std::move(Q_roots[0]), 1), shard_blk, &state_hash, true); + if (res1.is_error()) { + LOG(ERROR) << "error in account shard block header proof : " << res1.move_as_error().to_string(); + return; + } + block::gen::ShardStateUnsplit::Record sstate; + if (!(tlb::unpack_cell(std::move(state_root), sstate))) { + LOG(ERROR) << "cannot unpack state header"; + return; + } + vm::AugmentedDictionary accounts_dict{sstate.accounts->prefetch_ref(), 256, block::tlb::aug_ShardAccounts}; + auto acc_csr = accounts_dict.lookup(addr); + if (acc_csr.not_null()) { + if (root.is_null()) { + LOG(ERROR) << "account state proof shows that account state for " << workchain << ":" << addr.to_hex() + << " must be non-empty, but it actually is empty"; + return; + } + block::gen::ShardAccount::Record acc_info; + if (!tlb::csr_unpack(std::move(acc_csr), acc_info)) { + LOG(ERROR) << "cannot unpack ShardAccount from proof"; + return; + } + if (acc_info.account->get_hash().bits().compare(root->get_hash().bits(), 256)) { + LOG(ERROR) << "account state hash mismatch: Merkle proof expects " + << acc_info.account->get_hash().bits().to_hex(256) << " but received data has " + << root->get_hash().bits().to_hex(256); + return; + } + last_trans_hash = acc_info.last_trans_hash; + last_trans_lt = acc_info.last_trans_lt; + } else if (root.not_null()) { + LOG(ERROR) << "account state proof shows that account state for " << workchain << ":" << addr.to_hex() + << " must be empty, but it is not"; + return; + } + } catch (vm::VmError err) { + LOG(ERROR) << "error while traversing account proof : " << err.get_msg(); + return; + } catch (vm::VmVirtError err) { + LOG(ERROR) << "virtualization error while traversing account proof : " << err.get_msg(); + return; + } + auto out = td::TerminalIO::out(); + if (root.not_null()) { + out << "account state is "; + std::ostringstream outp; + block::gen::t_Account.print_ref(outp, root); + vm::load_cell_slice(root).print_rec(outp); + out << outp.str(); + out << "last transaction lt = " << last_trans_lt << " hash = " << last_trans_hash.to_hex() << std::endl; + } else { + out << "account state is empty" << std::endl; + } +} + +void TestNode::got_one_transaction(ton::BlockIdExt req_blkid, ton::BlockIdExt blkid, td::BufferSlice proof, + td::BufferSlice transaction, ton::WorkchainId workchain, ton::StdSmcAddress addr, + ton::LogicalTime trans_lt, bool dump) { + LOG(INFO) << "got transaction " << trans_lt << " for " << workchain << ":" << addr.to_hex() + << " with respect to block " << blkid.to_str(); + if (blkid != req_blkid) { + LOG(ERROR) << "obtained TransactionInfo for a different block " << blkid.to_str() << " instead of requested " + << req_blkid.to_str(); + return; + } + if (!ton::shard_contains(blkid.shard_full(), ton::extract_addr_prefix(workchain, addr))) { + LOG(ERROR) << "received data from block " << blkid.to_str() << " that cannot contain requested account " + << workchain << ":" << addr.to_hex(); + return; + } + Ref root; + if (!transaction.empty()) { + auto R = vm::std_boc_deserialize(std::move(transaction)); + if (R.is_error()) { + 
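+      // an empty `transaction` slice is legitimate here and simply leaves
+      // root null ("no such transaction"), which the Merkle proof below must
+      // then confirm; reaching this branch instead means data was present
+      // but was not a valid bag of cells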
LOG(ERROR) << "cannot deserialize transaction"; + return; + } + root = R.move_as_ok(); + CHECK(root.not_null()); + } + auto P = vm::std_boc_deserialize(std::move(proof)); + if (P.is_error()) { + LOG(ERROR) << "cannot deserialize block transaction proof"; + return; + } + auto proof_root = P.move_as_ok(); + try { + auto block_root = vm::MerkleProof::virtualize(std::move(proof_root), 1); + if (block_root.is_null()) { + LOG(ERROR) << "transaction block proof is invalid"; + return; + } + auto res1 = check_block_header_proof(block_root, blkid); + if (res1.is_error()) { + LOG(ERROR) << "error in transaction block header proof : " << res1.move_as_error().to_string(); + return; + } + auto trans_root_res = block::get_block_transaction_try(std::move(block_root), workchain, addr, trans_lt); + if (trans_root_res.is_error()) { + LOG(ERROR) << trans_root_res.move_as_error().message(); + return; + } + auto trans_root = trans_root_res.move_as_ok(); + if (trans_root.is_null() && root.not_null()) { + LOG(ERROR) << "error checking transaction proof: proof claims there is no such transaction, but we have got " + "transaction data with hash " + << root->get_hash().bits().to_hex(256); + return; + } + if (trans_root.not_null() && root.is_null()) { + LOG(ERROR) << "error checking transaction proof: proof claims there is such a transaction with hash " + << trans_root->get_hash().bits().to_hex(256) + << ", but we have got no " + "transaction data"; + return; + } + if (trans_root.not_null() && trans_root->get_hash().bits().compare(root->get_hash().bits(), 256)) { + LOG(ERROR) << "transaction hash mismatch: Merkle proof expects " << trans_root->get_hash().bits().to_hex(256) + << " but received data has " << root->get_hash().bits().to_hex(256); + return; + } + } catch (vm::VmError err) { + LOG(ERROR) << "error while traversing block transaction proof : " << err.get_msg(); + return; + } catch (vm::VmVirtError err) { + LOG(ERROR) << "virtualization error while traversing block transaction proof : " << err.get_msg(); + return; + } + auto out = td::TerminalIO::out(); + if (root.is_null()) { + out << "transaction not found" << std::endl; + } else { + out << "transaction is "; + std::ostringstream outp; + block::gen::t_Transaction.print_ref(outp, root); + vm::load_cell_slice(root).print_rec(outp); + out << outp.str(); + } +} + +bool unpack_addr(std::ostream& os, Ref csr) { + ton::WorkchainId wc; + ton::StdSmcAddress addr; + if (!block::tlb::t_MsgAddressInt.extract_std_address(std::move(csr), wc, addr)) { + os << ""; + return false; + } + os << wc << ":" << addr.to_hex(); + return true; +} + +bool unpack_message(std::ostream& os, Ref msg, int mode) { + if (msg.is_null()) { + os << ""; + return true; + } + vm::CellSlice cs{vm::NoVmOrd(), msg}; + block::gen::CommonMsgInfo info; + Ref src, dest; + switch (block::gen::t_CommonMsgInfo.get_tag(cs)) { + case block::gen::CommonMsgInfo::ext_in_msg_info: { + block::gen::CommonMsgInfo::Record_ext_in_msg_info info; + if (!tlb::unpack(cs, info)) { + LOG(DEBUG) << "cannot unpack inbound external message"; + return false; + } + os << "EXT-IN-MSG"; + if (!(mode & 2)) { + os << " TO: "; + if (!unpack_addr(os, std::move(info.dest))) { + return false; + } + } + return true; + } + case block::gen::CommonMsgInfo::ext_out_msg_info: { + block::gen::CommonMsgInfo::Record_ext_out_msg_info info; + if (!tlb::unpack(cs, info)) { + LOG(DEBUG) << "cannot unpack outbound external message"; + return false; + } + os << "EXT-OUT-MSG"; + if (!(mode & 1)) { + os << " FROM: "; + if (!unpack_addr(os, 
std::move(info.src))) {
+          return false;
+        }
+      }
+      os << " LT:" << info.created_lt << " UTIME:" << info.created_at;
+      return true;
+    }
+    case block::gen::CommonMsgInfo::int_msg_info: {
+      block::gen::CommonMsgInfo::Record_int_msg_info info;
+      if (!tlb::unpack(cs, info)) {
+        LOG(DEBUG) << "cannot unpack internal message";
+        return false;
+      }
+      os << "INT-MSG";
+      if (!(mode & 1)) {
+        os << " FROM: ";
+        if (!unpack_addr(os, std::move(info.src))) {
+          return false;
+        }
+      }
+      if (!(mode & 2)) {
+        os << " TO: ";
+        if (!unpack_addr(os, std::move(info.dest))) {
+          return false;
+        }
+      }
+      os << " LT:" << info.created_lt << " UTIME:" << info.created_at;
+      td::RefInt256 value;
+      Ref<vm::Cell> extra;
+      if (!block::unpack_CurrencyCollection(info.value, value, extra)) {
+        LOG(ERROR) << "cannot unpack message value";
+        return false;
+      }
+      os << " VALUE:" << value;
+      if (extra.not_null()) {
+        os << "+extra";
+      }
+      return true;
+    }
+    default:
+      LOG(ERROR) << "cannot unpack message";
+      return false;
+  }
+}
+
+std::string message_info_str(Ref<vm::Cell> msg, int mode) {
+  std::ostringstream os;
+  if (!unpack_message(os, msg, mode)) {
+    return "<cannot unpack message>";
+  } else {
+    return os.str();
+  }
+}
+
+void TestNode::got_last_transactions(std::vector<ton::BlockIdExt> blkids, td::BufferSlice transactions_boc,
+                                     ton::WorkchainId workchain, ton::StdSmcAddress addr, ton::LogicalTime lt,
+                                     ton::Bits256 hash, unsigned count, bool dump) {
+  LOG(INFO) << "got up to " << count << " transactions for " << workchain << ":" << addr.to_hex()
+            << " from last transaction " << lt << ":" << hash.to_hex();
+  auto R = vm::std_boc_deserialize_multi(std::move(transactions_boc));
+  if (R.is_error()) {
+    LOG(ERROR) << "cannot deserialize transactions BoC";
+    return;
+  }
+  auto list = R.move_as_ok();
+  auto n = list.size();
+  if (n > count) {
+    LOG(ERROR) << "obtained " << n << " transactions, but only " << count << " were requested";
+    return;
+  }
+  if (n != blkids.size()) {
+    LOG(ERROR) << "transaction list size " << n << " must be equal to the size of block id list " << blkids.size();
+    return;
+  }
+  auto out = td::TerminalIO::out();
+  unsigned c = 0;
+  for (auto& root : list) {
+    const auto& blkid = blkids[c++];
+    if (root.is_null()) {
+      out << "transaction #" << c << " from block " << blkid.to_str() << " not found" << std::endl;
+      LOG(ERROR) << "transactions are expected to be non-empty";
+      return;
+    }
+    if (hash != root->get_hash().bits()) {
+      LOG(ERROR) << "transaction hash mismatch: expected " << hash.to_hex() << ", found "
+                 << root->get_hash().bits().to_hex(256);
+      return;
+    }
+    out << "transaction #" << c << " from block " << blkid.to_str() << (dump ?
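+    // Note on the `mode` argument of unpack_message/message_info_str (above):
+    // bit 1 suppresses the FROM address and bit 2 suppresses the TO address.
+    // The `2 * 0` / `1 * 0` literals used below therefore evaluate to 0
+    // ("print both ends"); the factor only documents which bit could be set:
+    //
+    //   message_info_str(msg, 0);  // print FROM and TO
+    //   message_info_str(msg, 2);  // hide TO  (e.g. it is our own account)
+    //   message_info_str(msg, 1);  // hide FROM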
" is " : "\n"); + if (dump) { + std::ostringstream outp; + block::gen::t_Transaction.print_ref(outp, root); + vm::load_cell_slice(root).print_rec(outp); + out << outp.str(); + } + block::gen::Transaction::Record trans; + if (!tlb::unpack_cell(root, trans)) { + LOG(ERROR) << "cannot unpack transaction #" << c; + return; + } + if (trans.lt != lt) { + LOG(ERROR) << "transaction lt mismatch: expected " << lt << ", found " << trans.lt; + return; + } + lt = trans.prev_trans_lt; + hash = trans.prev_trans_hash; + out << " time=" << trans.now << " outmsg_cnt=" << trans.outmsg_cnt << std::endl; + auto in_msg = trans.in_msg->prefetch_ref(); + if (in_msg.is_null()) { + out << " (no inbound message)" << std::endl; + } else { + out << " inbound message: " << message_info_str(in_msg, 2 * 0) << std::endl; + if (dump) { + out << " " << block::gen::t_Message_Any.as_string_ref(in_msg, 4); // indentation = 4 spaces + } + } + vm::Dictionary dict{trans.out_msgs, 15}; + for (int x = 0; x < trans.outmsg_cnt && x < 100; x++) { + auto out_msg = dict.lookup_ref(td::BitArray<15>{x}); + out << " outbound message #" << x << ": " << message_info_str(out_msg, 1 * 0) << std::endl; + if (dump) { + out << " " << block::gen::t_Message_Any.as_string_ref(out_msg, 4); + } + } + register_blkid(blkid); // unsafe? + } + if (lt > 0) { + out << "previous transaction has lt " << lt << " hash " << hash.to_hex() << std::endl; + if (c < count) { + LOG(WARNING) << "obtained less transactions than required"; + } + } else { + out << "no preceding transactions (list complete)" << std::endl; + } +} + +bool TestNode::get_all_shards(bool use_last, ton::BlockIdExt blkid) { + if (use_last) { + blkid = mc_last_id_; + } + if (!blkid.is_valid_full()) { + return set_error(use_last ? "must obtain last block information before making other queries" + : "invalid masterchain block id"); + } + if (!blkid.is_masterchain()) { + return set_error("only masterchain blocks contain shard configuration"); + } + if (!(ready_ && !client_.empty())) { + return set_error("server connection not ready"); + } + auto b = ton::serialize_tl_object( + ton::create_tl_object(ton::create_tl_block_id(blkid)), true); + LOG(INFO) << "requesting recent shard configuration"; + return envelope_send_query(std::move(b), [Self = actor_id(this)](td::Result R)->void { + if (R.is_error()) { + return; + } + auto F = ton::fetch_tl_object(R.move_as_ok(), true); + if (F.is_error()) { + LOG(ERROR) << "cannot parse answer to liteServer.getAllShardsInfo"; + } else { + auto f = F.move_as_ok(); + td::actor::send_closure_later(Self, &TestNode::got_all_shards, ton::create_block_id(f->id_), std::move(f->proof_), + std::move(f->data_)); + } + }); +} + +void TestNode::got_all_shards(ton::BlockIdExt blk, td::BufferSlice proof, td::BufferSlice data) { + LOG(INFO) << "got shard configuration with respect to block " << blk.to_str(); + if (data.empty()) { + td::TerminalIO::out() << "shard configuration is empty" << '\n'; + } else { + auto R = vm::std_boc_deserialize(data.clone()); + if (R.is_error()) { + LOG(ERROR) << "cannot deserialize shard configuration"; + return; + } + auto root = R.move_as_ok(); + auto out = td::TerminalIO::out(); + out << "shard configuration is "; + std::ostringstream outp; + block::gen::t_ShardHashes.print_ref(outp, root); + vm::load_cell_slice(root).print_rec(outp); + out << outp.str(); + block::ShardConfig sh_conf; + if (!sh_conf.unpack(vm::load_cell_slice_ref(root))) { + out << "cannot extract shard block list from shard configuration\n"; + } else { + auto ids = 
sh_conf.get_shard_hash_ids(true);
+      int cnt = 0;
+      for (auto id : ids) {
+        auto ref = sh_conf.get_shard_hash(ton::ShardIdFull(id));
+        if (ref.not_null()) {
+          register_blkid(ref->top_block_id());
+          out << "shard #" << ++cnt << " : " << ref->top_block_id().to_str() << " @ " << ref->created_at() << " lt "
+              << ref->start_lt() << " .. " << ref->end_lt() << std::endl;
+        } else {
+          out << "shard #" << ++cnt << " : " << id.to_str() << " (cannot unpack)\n";
+        }
+      }
+    }
+  }
+  show_new_blkids();
+}
+
+bool TestNode::get_block(ton::BlockIdExt blkid, bool dump) {
+  LOG(INFO) << "got block download request for " << blkid.to_str();
+  auto b = ton::serialize_tl_object(
+      ton::create_tl_object<ton::lite_api::liteServer_getBlock>(ton::create_tl_block_id(blkid)), true);
+  return envelope_send_query(
+      std::move(b), [ Self = actor_id(this), blkid, dump ](td::Result<td::BufferSlice> res)->void {
+        if (res.is_error()) {
+          LOG(ERROR) << "cannot obtain block " << blkid.to_str()
+                     << " from server : " << res.move_as_error().to_string();
+          return;
+        } else {
+          auto F = ton::fetch_tl_object<ton::lite_api::liteServer_blockData>(res.move_as_ok(), true);
+          if (F.is_error()) {
+            LOG(ERROR) << "cannot parse answer to liteServer.getBlock : " << F.move_as_error().to_string();
+          } else {
+            auto f = F.move_as_ok();
+            auto blk_id = ton::create_block_id(f->id_);
+            LOG(INFO) << "obtained block " << blk_id.to_str() << " from server";
+            if (blk_id != blkid) {
+              LOG(ERROR) << "block id mismatch: expected data for block " << blkid.to_str() << ", obtained for "
+                         << blk_id.to_str();
+              return;
+            }
+            td::actor::send_closure_later(Self, &TestNode::got_block, blk_id, std::move(f->data_), dump);
+          }
+        }
+      });
+}
+
+bool TestNode::get_state(ton::BlockIdExt blkid, bool dump) {
+  LOG(INFO) << "got state download request for " << blkid.to_str();
+  auto b = ton::serialize_tl_object(
+      ton::create_tl_object<ton::lite_api::liteServer_getState>(ton::create_tl_block_id(blkid)), true);
+  return envelope_send_query(
+      std::move(b), [ Self = actor_id(this), blkid, dump ](td::Result<td::BufferSlice> res)->void {
+        if (res.is_error()) {
+          LOG(ERROR) << "cannot obtain state " << blkid.to_str()
+                     << " from server : " << res.move_as_error().to_string();
+          return;
+        } else {
+          auto F = ton::fetch_tl_object<ton::lite_api::liteServer_blockState>(res.move_as_ok(), true);
+          if (F.is_error()) {
+            LOG(ERROR) << "cannot parse answer to liteServer.getState";
+          } else {
+            auto f = F.move_as_ok();
+            auto blk_id = ton::create_block_id(f->id_);
+            LOG(INFO) << "obtained state " << blk_id.to_str() << " from server";
+            if (blk_id != blkid) {
+              LOG(ERROR) << "block id mismatch: expected state for block " << blkid.to_str() << ", obtained for "
+                         << blk_id.to_str();
+              return;
+            }
+            td::actor::send_closure_later(Self, &TestNode::got_state, blk_id, ton::UInt256_2_Bits256(f->root_hash_),
+                                          ton::UInt256_2_Bits256(f->file_hash_), std::move(f->data_), dump);
+          }
+        }
+      });
+}
+
+void TestNode::got_block(ton::BlockIdExt blkid, td::BufferSlice data, bool dump) {
+  LOG(INFO) << "obtained " << data.size() << " data bytes for block " << blkid.to_str();
+  ton::FileHash fhash;
+  td::sha256(data.as_slice(), fhash.as_slice());
+  if (fhash != blkid.file_hash) {
+    LOG(ERROR) << "file hash mismatch for block " << blkid.to_str() << ": expected " << blkid.file_hash.to_hex()
+               << ", computed " << fhash.to_hex();
+    return;
+  }
+  register_blkid(blkid);
+  if (!db_root_.empty()) {
+    auto res = save_db_file(fhash, data.clone());
+    if (res.is_error()) {
+      LOG(ERROR) << "error saving block file: " << res.to_string();
+    }
+  }
+  if (dump) {
+    auto res = vm::std_boc_deserialize(data.clone());
+    if (res.is_error()) {
+      LOG(ERROR) << "cannot deserialize block data : " <<
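+    // Two independent hashes guard a downloaded block: file_hash is the
+    // SHA-256 of the raw BoC bytes (checked above, before any parsing), while
+    // root_hash is the representation hash of the deserialized root cell
+    // (checked below). Condensed sketch of the pair of checks:
+    //
+    //   ton::FileHash fh;
+    //   td::sha256(data.as_slice(), fh.as_slice());
+    //   bool file_ok = (fh == blkid.file_hash);
+    //   // after vm::std_boc_deserialize(data):
+    //   bool root_ok = (ton::RootHash{root->get_hash().bits()} == blkid.root_hash);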
res.move_as_error().to_string(); + return; + } + auto root = res.move_as_ok(); + ton::RootHash rhash{root->get_hash().bits()}; + if (rhash != blkid.root_hash) { + LOG(ERROR) << "block root hash mismatch: data has " << rhash.to_hex() << " , expected " + << blkid.root_hash.to_hex(); + return; + } + auto out = td::TerminalIO::out(); + out << "block contents is "; + std::ostringstream outp; + block::gen::t_Block.print_ref(outp, root); + vm::load_cell_slice(root).print_rec(outp); + out << outp.str(); + show_block_header(blkid, std::move(root), 0xffff); + } else { + auto res = lazy_boc_deserialize(data.clone()); + if (res.is_error()) { + LOG(ERROR) << "cannot lazily deserialize block data : " << res.move_as_error().to_string(); + return; + } + auto pair = res.move_as_ok(); + auto root = std::move(pair.first); + ton::RootHash rhash{root->get_hash().bits()}; + if (rhash != blkid.root_hash) { + LOG(ERROR) << "block root hash mismatch: data has " << rhash.to_hex() << " , expected " + << blkid.root_hash.to_hex(); + return; + } + show_block_header(blkid, std::move(root), 0xffff); + } + show_new_blkids(); +} + +void TestNode::got_state(ton::BlockIdExt blkid, ton::RootHash root_hash, ton::FileHash file_hash, td::BufferSlice data, + bool dump) { + LOG(INFO) << "obtained " << data.size() << " state bytes for block " << blkid.to_str(); + ton::FileHash fhash; + td::sha256(data.as_slice(), fhash.as_slice()); + if (fhash != file_hash) { + LOG(ERROR) << "file hash mismatch for state " << blkid.to_str() << ": expected " << file_hash.to_hex() + << ", computed " << fhash.to_hex(); + return; + } + register_blkid(blkid); + if (!db_root_.empty()) { + auto res = save_db_file(fhash, std::move(data)); + if (res.is_error()) { + LOG(ERROR) << "error saving state file: " << res.to_string(); + } + } + if (dump) { + auto res = vm::std_boc_deserialize(data.clone()); + if (res.is_error()) { + LOG(ERROR) << "cannot deserialize block data : " << res.move_as_error().to_string(); + return; + } + auto root = res.move_as_ok(); + ton::RootHash rhash{root->get_hash().bits()}; + if (rhash != root_hash) { + LOG(ERROR) << "block state root hash mismatch: data has " << rhash.to_hex() << " , expected " + << root_hash.to_hex(); + return; + } + auto out = td::TerminalIO::out(); + out << "shard state contents is "; + std::ostringstream outp; + block::gen::t_ShardState.print_ref(outp, root); + vm::load_cell_slice(root).print_rec(outp); + out << outp.str(); + show_state_header(blkid, std::move(root), 0xffff); + } else { + auto res = lazy_boc_deserialize(data.clone()); + if (res.is_error()) { + LOG(ERROR) << "cannot lazily deserialize block data : " << res.move_as_error().to_string(); + return; + } + auto pair = res.move_as_ok(); + auto root = std::move(pair.first); + ton::RootHash rhash{root->get_hash().bits()}; + if (rhash != root_hash) { + LOG(ERROR) << "block state root hash mismatch: data has " << rhash.to_hex() << " , expected " + << root_hash.to_hex(); + return; + } + show_state_header(blkid, std::move(root), 0xffff); + } + show_new_blkids(); +} + +bool TestNode::get_block_header(ton::BlockIdExt blkid, int mode) { + LOG(INFO) << "got block header request for " << blkid.to_str() << " with mode " << mode; + auto b = ton::serialize_tl_object( + ton::create_tl_object(ton::create_tl_block_id(blkid), mode), true); + return envelope_send_query(std::move(b), [ Self = actor_id(this), blkid ](td::Result res)->void { + if (res.is_error()) { + LOG(ERROR) << "cannot obtain block header for " << blkid.to_str() + << " from server : " << 
res.move_as_error().to_string();
+      return;
+    } else {
+      auto F = ton::fetch_tl_object<ton::lite_api::liteServer_blockHeader>(res.move_as_ok(), true);
+      if (F.is_error()) {
+        LOG(ERROR) << "cannot parse answer to liteServer.getBlockHeader : " << F.move_as_error().to_string();
+      } else {
+        auto f = F.move_as_ok();
+        auto blk_id = ton::create_block_id(f->id_);
+        LOG(INFO) << "obtained block header for " << blk_id.to_str() << " from server";
+        if (blk_id != blkid) {
+          LOG(ERROR) << "block id mismatch: expected data for block " << blkid.to_str() << ", obtained for "
+                     << blk_id.to_str();
+        }
+        td::actor::send_closure_later(Self, &TestNode::got_block_header, blk_id, std::move(f->header_proof_),
+                                      f->mode_);
+      }
+    }
+  });
+}
+
+bool TestNode::show_block_header(ton::BlockIdExt blkid, Ref<vm::Cell> root, int mode) {
+  ton::RootHash vhash{root->get_hash().bits()};
+  if (vhash != blkid.root_hash) {
+    LOG(ERROR) << " block header for block " << blkid.to_str() << " has incorrect root hash " << vhash.to_hex()
+               << " instead of " << blkid.root_hash.to_hex();
+    return false;
+  }
+  std::vector<ton::BlockIdExt> prev;
+  ton::BlockIdExt mc_blkid, blkid_u = blkid;
+  bool after_split;
+  auto res = block::unpack_block_prev_blk_ext(root, blkid_u, prev, mc_blkid, after_split);
+  if (res.is_error()) {
+    LOG(ERROR) << "cannot unpack header for block " << blkid.to_str() << " : " << res.to_string();
+    return false;
+  }
+  if (blkid_u.id != blkid.id || blkid_u.root_hash != blkid.root_hash) {
+    LOG(ERROR) << "block header claims it is for block " << blkid_u.to_str() << " not " << blkid.to_str();
+    return false;
+  }
+  block::gen::Block::Record blk;
+  block::gen::BlockInfo::Record info;
+  if (!(tlb::unpack_cell(root, blk) && tlb::unpack_cell(blk.info, info))) {
+    LOG(ERROR) << "cannot unpack header for block " << blkid.to_str();
+    return false;
+  }
+  auto out = td::TerminalIO::out();
+  out << "block header of " << blkid.to_str() << " @ " << info.gen_utime << " lt " << info.start_lt << " .. 
" + << info.end_lt << std::endl; + out << "global_id=" << blk.global_id << " version=" << info.version << " not_master=" << info.not_master + << " after_merge=" << info.after_merge << " after_split=" << info.after_split + << " before_split=" << info.before_split << " want_merge=" << info.want_merge << " want_split=" << info.want_split + << " validator_list_hash_short=" << info.gen_validator_list_hash_short + << " catchain_seqno=" << info.gen_catchain_seqno << " min_ref_mc_seqno=" << info.min_ref_mc_seqno << std::endl; + register_blkid(blkid); + int cnt = 0; + for (auto id : prev) { + out << "previous block #" << ++cnt << " : " << id.to_str() << std::endl; + register_blkid(id); + } + out << "reference masterchain block : " << mc_blkid.to_str() << std::endl; + register_blkid(mc_blkid); + return true; +} + +bool TestNode::show_state_header(ton::BlockIdExt blkid, Ref root, int mode) { + return true; +} + +void TestNode::got_block_header(ton::BlockIdExt blkid, td::BufferSlice data, int mode) { + LOG(INFO) << "obtained " << data.size() << " data bytes as block header for " << blkid.to_str(); + auto res = vm::std_boc_deserialize(data.clone()); + if (res.is_error()) { + LOG(ERROR) << "cannot deserialize block header data : " << res.move_as_error().to_string(); + return; + } + auto root = res.move_as_ok(); + std::ostringstream outp; + vm::CellSlice cs{vm::NoVm{}, root}; + cs.print_rec(outp); + td::TerminalIO::out() << outp.str(); + try { + auto virt_root = vm::MerkleProof::virtualize(root, 1); + if (virt_root.is_null()) { + LOG(ERROR) << " block header proof for block " << blkid.to_str() << " is not a valid Merkle proof"; + return; + } + show_block_header(blkid, std::move(virt_root), mode); + } catch (vm::VmError err) { + LOG(ERROR) << "error processing header for " << blkid.to_str() << " : " << err.get_msg(); + } catch (vm::VmVirtError err) { + LOG(ERROR) << "error processing header for " << blkid.to_str() << " : " << err.get_msg(); + } + show_new_blkids(); +} + +td::Result get_uint256(std::string str) { + if (str.size() != 64) { + return td::Status::Error("uint256 must have 64 bytes"); + } + td::UInt256 res; + for (size_t i = 0; i < 32; i++) { + res.raw[i] = static_cast(td::hex_to_int(str[2 * i]) * 16 + td::hex_to_int(str[2 * i + 1])); + } + return res; +} + +void run_updater(td::actor::Scheduler* scheduler, td::actor::ActorOwn* x){ + unsigned int microseconds = 2000000; + while(true){ + usleep(microseconds); + scheduler -> run_in_context([&] { + td::actor::send_closure(x -> get(), &TestNode::web_last); + }); + } +} + +int main(int argc, char* argv[]) { + SET_VERBOSITY_LEVEL(verbosity_INFO); + td::set_default_failure_signal_handler(); + + td::actor::ActorOwn x; + + td::OptionsParser p; + p.set_description("Test Lite Client for TON Blockchain"); + p.add_option('h', "help", "prints_help", [&]() { + char b[10240]; + td::StringBuilder sb(td::MutableSlice{b, 10000}); + sb << p; + std::cout << sb.as_cslice().c_str(); + std::exit(2); + return td::Status::OK(); + }); + p.add_option('C', "global-config", "file to read global config", [&](td::Slice fname) { + td::actor::send_closure(x, &TestNode::set_global_config, fname.str()); + return td::Status::OK(); + }); + p.add_option('c', "local-config", "file to read local config", [&](td::Slice fname) { + td::actor::send_closure(x, &TestNode::set_local_config, fname.str()); + return td::Status::OK(); + }); + p.add_option('r', "disable-readline", "", [&]() { + td::actor::send_closure(x, &TestNode::set_readline_enabled, false); + return td::Status::OK(); + }); + 
p.add_option('R', "enable-readline", "", [&]() { + td::actor::send_closure(x, &TestNode::set_readline_enabled, true); + return td::Status::OK(); + }); + p.add_option('D', "db", "root for dbs", [&](td::Slice fname) { + td::actor::send_closure(x, &TestNode::set_db_root, fname.str()); + return td::Status::OK(); + }); + p.add_option('v', "verbosity", "set verbosity level", [&](td::Slice arg) { + verbosity = td::to_integer(arg); + SET_VERBOSITY_LEVEL(VERBOSITY_NAME(FATAL) + verbosity); + return (verbosity >= 0 && verbosity <= 9) ? td::Status::OK() : td::Status::Error("verbosity must be 0..9"); + }); + p.add_option('i', "idx", "set liteserver idx", [&](td::Slice arg) { + auto idx = td::to_integer(arg); + td::actor::send_closure(x, &TestNode::set_liteserver_idx, idx); + return td::Status::OK(); + }); + p.add_option('d', "daemonize", "set SIGHUP", [&]() { + td::set_signal_handler(td::SignalType::HangUp, + [](int sig) { +#if TD_DARWIN || TD_LINUX + close(0); + setsid(); +#endif + }) + .ensure(); + return td::Status::OK(); + }); +#if TD_DARWIN || TD_LINUX + p.add_option('l', "logname", "log to file", [&](td::Slice fname) { + auto FileLog = td::FileFd::open(td::CSlice(fname.str().c_str()), + td::FileFd::Flags::Create | td::FileFd::Flags::Append | td::FileFd::Flags::Write) + .move_as_ok(); + + dup2(FileLog.get_native_fd().fd(), 1); + dup2(FileLog.get_native_fd().fd(), 2); + return td::Status::OK(); + }); +#endif + + td::actor::Scheduler scheduler({2}); + + scheduler.run_in_context([&] { x = td::actor::create_actor("testnode"); }); + + scheduler.run_in_context([&] { p.run(argc, argv).ensure(); }); + scheduler.run_in_context([&] { + td::actor::send_closure(x, &TestNode::run); + // TMP disable release due to having an ability to call obj in another threads + // TODO: do requests directly w/o using actors + // x.release(); + }); + + // web server thread + std::thread webserver = std::thread(TestNode::run_web_server, &scheduler, &x); + + // updater thread called 'last' command + std::thread updater = std::thread(run_updater, &scheduler, &x); + + scheduler.run(); + + return 0; +} \ No newline at end of file diff --git a/ton-test-liteclient-full/lite-client-with-webserver/src/web_server/include/method-getaccount.cpp b/ton-test-liteclient-full/lite-client-with-webserver/src/web_server/include/method-getaccount.cpp new file mode 100644 index 0000000..16334b1 --- /dev/null +++ b/ton-test-liteclient-full/lite-client-with-webserver/src/web_server/include/method-getaccount.cpp @@ -0,0 +1,60 @@ + +void TestNode::get_account_state_web(std::string address, std::shared_ptr response) { + ton::WorkchainId workchain = ton::masterchainId; // change to basechain later + ton::StdSmcAddress addr; + if (!TestNode::parse_account_addr(address, workchain, addr)){ + web_error_response(response, "parse_account_addr fail"); + return; + } + + if (!mc_last_id_.is_valid()) { + web_error_response(response, "must obtain last block information before making other queries"); + return; + } + if (!(ready_ && !client_.empty())) { + web_error_response(response, "server connection not ready"); + return; + } + + auto a = ton::create_tl_object(workchain, ton::Bits256_2_UInt256(addr)); + auto b = ton::serialize_tl_object(ton::create_tl_object( + ton::create_tl_block_id(mc_last_id_), std::move(a)), + true); + + envelope_send_web( + std::move(b), [Self = actor_id(this), workchain, addr, response](td::Result R) -> void { + if (R.is_error()) { + web_error_response(response, "Unknown Error"); + return; + } + auto F = 
ton::fetch_tl_object<ton::lite_api::liteServer_accountState>(R.move_as_ok(), true);
+        if (F.is_error()) {
+          web_error_response(response, "cannot parse answer to liteServer.getAccountState");
+        } else {
+          auto f = F.move_as_ok();
+          td::actor::send_closure_later(Self, &TestNode::got_account_state_web, ton::create_block_id(f->id_),
+                                        ton::create_block_id(f->shardblk_), std::move(f->shard_proof_),
+                                        std::move(f->proof_), std::move(f->state_), workchain, addr, response);
+        }
+      }, response);
+}
+
+void TestNode::got_account_state_web(ton::BlockIdExt blk, ton::BlockIdExt shard_blk, td::BufferSlice shard_proof,
+                                     td::BufferSlice proof, td::BufferSlice state, ton::WorkchainId workchain,
+                                     ton::StdSmcAddress addr, std::shared_ptr<HttpServer::Response> response) {
+  if (state.empty()) {
+    web_error_response(response, "account state is empty");
+  } else {
+    auto R = vm::std_boc_deserialize(state.clone());
+    if (R.is_error()) {
+      web_error_response(response, "cannot deserialize account state");
+      return;
+    }
+    auto root = R.move_as_ok();
+    std::ostringstream outp;
+    block::gen::t_Account.print_ref(outp, root);
+    vm::load_cell_slice(root).print_rec(outp);
+
+    web_success_response(response, outp.str());
+  }
+}
\ No newline at end of file
diff --git a/ton-test-liteclient-full/lite-client-with-webserver/src/web_server/include/method-getblock.cpp b/ton-test-liteclient-full/lite-client-with-webserver/src/web_server/include/method-getblock.cpp
new file mode 100644
index 0000000..52de68e
--- /dev/null
+++ b/ton-test-liteclient-full/lite-client-with-webserver/src/web_server/include/method-getblock.cpp
@@ -0,0 +1,133 @@
+void TestNode::get_block_web(std::string blkid_str, std::shared_ptr<HttpServer::Response> response, bool dump) {
+  ton::BlockIdExt blkid;
+  if (!TestNode::parse_block_id_ext(blkid_str, blkid, true)) {
+    web_error_response(response, "cannot parse block_id");
+    return;  // do not query the server with an unparsed block id
+  }
+
+  auto b = ton::serialize_tl_object(
+      ton::create_tl_object<ton::lite_api::liteServer_getBlock>(ton::create_tl_block_id(blkid)), true);
+  envelope_send_query(
+      std::move(b), [ Self = actor_id(this), blkid, dump, response ](td::Result<td::BufferSlice> res)->void {
+        if (res.is_error()) {
+          web_error_response(response, "cannot obtain block " + blkid.to_str() +
+                                           " from server : " + res.move_as_error().to_string());
+          return;
+        } else {
+          auto F = ton::fetch_tl_object<ton::lite_api::liteServer_blockData>(res.move_as_ok(), true);
+          if (F.is_error()) {
+            web_error_response(response,
+                               "cannot parse answer to liteServer.getBlock : " + F.move_as_error().to_string());
+          } else {
+            auto f = F.move_as_ok();
+            auto blk_id = ton::create_block_id(f->id_);
+            if (blk_id != blkid) {
+              web_error_response(response, "block id mismatch: expected data for block " + blkid.to_str() +
+                                               ", obtained for " + blk_id.to_str());
+              return;  // the response has already been answered
+            }
+            td::actor::send_closure_later(Self, &TestNode::got_block_web, blk_id, std::move(f->data_), dump, response);
+          }
+        }
+      });
+}
+
+
+void TestNode::got_block_web(ton::BlockIdExt blkid, td::BufferSlice data, bool dump,
+                             std::shared_ptr<HttpServer::Response> response) {
+  LOG(INFO) << "obtained " << data.size() << " data bytes for block " << blkid.to_str();
+  ton::FileHash fhash;
+  td::sha256(data.as_slice(), fhash.as_slice());
+  if (fhash != blkid.file_hash) {
+    web_error_response(response, "file hash mismatch for block " + blkid.to_str() + ": expected " +
+                                     blkid.file_hash.to_hex() + ", computed " + fhash.to_hex());
+    return;
+  }
+  if (!db_root_.empty()) {
+    auto res = save_db_file(fhash, data.clone());
+    if (res.is_error()) {
+      web_error_response(response, "error saving block file: " + res.to_string());
+    }
+  }
+  if (dump) {
+    auto res = vm::std_boc_deserialize(data.clone());
+    if (res.is_error()) {
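+      // All web replies funnel through web_error_response / web_success_response
+      // (see lite-client-web-server.cpp below), which wrap the payload in a
+      // boost::property_tree and emit {"error": "..."} or {"result": ...}.
+      // Equivalent sketch:
+      //
+      //   pt::ptree root;
+      //   root.put("error", "cannot deserialize block data");
+      //   std::ostringstream oss;
+      //   pt::write_json(oss, root);  // -> {"error":"cannot deserialize block data"}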
web_error_response(response, "cannot deserialize block data " + res.move_as_error().to_string()); + return; + } + auto root = res.move_as_ok(); + ton::RootHash rhash{root->get_hash().bits()}; + if (rhash != blkid.root_hash) { + web_error_response(response, "block root hash mismatch: data has " + rhash.to_hex() +\ + " , expected " + blkid.root_hash.to_hex()); + return; + } + //auto out = td::TerminalIO::out(); + //out << "block contents is "; + std::ostringstream block_data, vm_data, header_data; + block::gen::t_Block.print_ref(block_data, root); + vm::load_cell_slice(root).print_rec(vm_data); + give_block_header_description(header_data, blkid, std::move(root), 0xffff); + + + pt::ptree result; + result.put("block", block_data.str()); + result.put("vm", vm_data.str()); + result.put("header", header_data.str()); + + web_success_response(response, result); + } else { + auto res = lazy_boc_deserialize(data.clone()); + if (res.is_error()) { + web_error_response(response, "cannot lazily deserialize block data : " + res.move_as_error().to_string()); + return; + } + auto pair = res.move_as_ok(); + auto root = std::move(pair.first); + ton::RootHash rhash{root->get_hash().bits()}; + if (rhash != blkid.root_hash) { + web_error_response(response, "block root hash mismatch: data has " + rhash.to_hex() +\ + " , expected " + blkid.root_hash.to_hex()); + return; + } + show_block_header(blkid, std::move(root), 0xffff); + } +} + +bool TestNode::give_block_header_description(std::ostringstream& out, ton::BlockIdExt blkid, Ref root, int mode) { + ton::RootHash vhash{root->get_hash().bits()}; + if (vhash != blkid.root_hash) { + LOG(ERROR) << " block header for block " << blkid.to_str() << " has incorrect root hash " << vhash.to_hex() + << " instead of " << blkid.root_hash.to_hex(); + return false; + } + std::vector prev; + ton::BlockIdExt mc_blkid, blkid_u = blkid; + bool after_split; + auto res = block::unpack_block_prev_blk_ext(root, blkid_u, prev, mc_blkid, after_split); + if (res.is_error()) { + LOG(ERROR) << "cannot unpack header for block " << blkid.to_str() << " : " << res.to_string(); + return false; + } + if (blkid_u.id != blkid.id || blkid_u.root_hash != blkid.root_hash) { + LOG(ERROR) << "block header claims it is for block " << blkid_u.to_str() << " not " << blkid.to_str(); + return false; + } + block::gen::Block::Record blk; + block::gen::BlockInfo::Record info; + if (!(tlb::unpack_cell(root, blk) && tlb::unpack_cell(blk.info, info))) { + LOG(ERROR) << "cannot unpack header for block " << blkid.to_str(); + return false; + } + out << "block header of " << blkid.to_str() << " @ " << info.gen_utime << " lt " << info.start_lt << " .. 
" + << info.end_lt << std::endl; + out << "global_id=" << blk.global_id << " version=" << info.version << " not_master=" << info.not_master + << " after_merge=" << info.after_merge << " after_split=" << info.after_split + << " before_split=" << info.before_split << " want_merge=" << info.want_merge << " want_split=" << info.want_split + << " validator_list_hash_short=" << info.gen_validator_list_hash_short + << " catchain_seqno=" << info.gen_catchain_seqno << " min_ref_mc_seqno=" << info.min_ref_mc_seqno << std::endl; + int cnt = 0; + for (auto id : prev) { + out << "previous block #" << ++cnt << " : " << id.to_str() << std::endl; + } + out << "reference masterchain block : " << mc_blkid.to_str() << std::endl; + return true; +} \ No newline at end of file diff --git a/ton-test-liteclient-full/lite-client-with-webserver/src/web_server/include/method-last.cpp b/ton-test-liteclient-full/lite-client-with-webserver/src/web_server/include/method-last.cpp new file mode 100644 index 0000000..c7f9683 --- /dev/null +++ b/ton-test-liteclient-full/lite-client-with-webserver/src/web_server/include/method-last.cpp @@ -0,0 +1,19 @@ +bool TestNode::get_server_mc_block_id_web(std::shared_ptr response) { + auto b = ton::serialize_tl_object(ton::create_tl_object(), true); + return envelope_send_query(std::move(b), [Self = actor_id(this), response](td::Result res)->void { + if (res.is_error()) { + web_error_response(response, "cannot get masterchain info from server"); + return; + } else { + auto F = ton::fetch_tl_object(res.move_as_ok(), true); + if (F.is_error()) { + web_error_response(response, "cannot parse answer to liteServer.getMasterchainInfo"); + } else { + auto f = F.move_as_ok(); + auto blk_id = create_block_id(f->last_); + // auto zstate_id = create_zero_state_id(f->init_); + web_success_response(response, blk_id.to_str()); + } + } + }); +} \ No newline at end of file diff --git a/ton-test-liteclient-full/lite-client-with-webserver/src/web_server/include/method-time.cpp b/ton-test-liteclient-full/lite-client-with-webserver/src/web_server/include/method-time.cpp new file mode 100644 index 0000000..829ea7f --- /dev/null +++ b/ton-test-liteclient-full/lite-client-with-webserver/src/web_server/include/method-time.cpp @@ -0,0 +1,19 @@ +void TestNode::get_server_time_web(std::shared_ptr response) { + auto b = ton::serialize_tl_object(ton::create_tl_object(), true); + + envelope_send_web(std::move(b), [&, Self = actor_id(this), response](td::Result res) -> void { + if (res.is_error()) { + web_error_response(response, "cannot get server time"); + return; + } else { + auto F = ton::fetch_tl_object(res.move_as_ok(), true); + if (F.is_error()) { + web_error_response(response, "cannot parse answer to liteServer.getTime"); + } else { + server_time_ = F.move_as_ok()->now_; + server_time_got_at_ = static_cast(td::Clocks::system()); + web_success_response(response, std::to_string(server_time_)); + } + } + }, response); +} \ No newline at end of file diff --git a/ton-test-liteclient-full/lite-client-with-webserver/src/web_server/lite-client-test-node-helpers.cpp b/ton-test-liteclient-full/lite-client-with-webserver/src/web_server/lite-client-test-node-helpers.cpp new file mode 100644 index 0000000..8f09358 --- /dev/null +++ b/ton-test-liteclient-full/lite-client-with-webserver/src/web_server/lite-client-test-node-helpers.cpp @@ -0,0 +1,123 @@ +td::Result, std::shared_ptr>> lazy_boc_deserialize( + td::BufferSlice data) { + vm::StaticBagOfCellsDbLazy::Options options; + options.check_crc32c = true; + TRY_RESULT(boc, 
vm::StaticBagOfCellsDbLazy::create(vm::BufferSliceBlobView::create(std::move(data)), options)); + TRY_RESULT(rc, boc->get_root_count()); + if (rc != 1) { + return td::Status::Error(-668, "bag-of-cells is not standard (exactly one root cell expected)"); + } + TRY_RESULT(root, boc->get_root_cell(0)); + return std::make_pair(std::move(root), std::move(boc)); +} + +bool TestNode::parse_account_addr(std::string acc_string, ton::WorkchainId& wc, ton::StdSmcAddress& addr) { + block::StdAddress a{acc_string}; + if (a.is_valid()) { + wc = a.workchain; + addr = a.addr; + return true; + } + auto pos = acc_string.find(':'); + if (pos != std::string::npos) { + std::string tmp{acc_string, 0, pos}; + char* end = 0; + wc = static_cast(strtoll(tmp.c_str(), &end, 10)); + if (end != tmp.c_str() + pos) { + return false; + } + if (wc != 0 && wc != -1) { + return false; + } + acc_string.erase(0, pos + 1); + } + LOG(DEBUG) << "parsing " << acc_string << " address"; + if (acc_string.size() == 64) { + int i = 0; + for (int c : acc_string) { + int x = parse_hex_digit(c), m = 15; + if (x < 0) { + i = -1; + break; + } + if (!(i & 1)) { + x <<= 4; + m <<= 4; + } + addr.data()[i >> 1] = (unsigned char)((addr.data()[i >> 1] & ~m) | x); + i++; + } + if (i >= 0) { + return wc != ton::workchainInvalid; + } + } + return false; +} + +bool TestNode::envelope_send_web(td::BufferSlice query, + td::Promise promise, + std::shared_ptr response) { + if (!ready_ || client_.empty()) { + web_error_response(response, "failed to send query to server: not ready"); + return false; + } + auto P = td::PromiseCreator::lambda([promise = std::move(promise), response](td::Result R) mutable { + if (R.is_error()) { + auto err = R.move_as_error(); + web_error_response(response, "failed query"); + promise.set_error(std::move(err)); + return; + } + auto data = R.move_as_ok(); + auto F = ton::fetch_tl_object(data.clone(), true); + if (F.is_ok()) { + auto f = F.move_as_ok(); + auto err = td::Status::Error(f->code_, f->message_); + web_error_response(response, "received error"); + promise.set_error(std::move(err)); + return; + } + promise.set_result(std::move(data)); + }); + td::BufferSlice b = + ton::serialize_tl_object(ton::create_tl_object(std::move(query)), true); + td::actor::send_closure(client_, &ton::AdnlExtClient::send_query, "query", std::move(b), td::Timestamp::in(10.0), + std::move(P)); + return true; +} + +bool TestNode::get_server_mc_block_id_silent() { + auto b = ton::serialize_tl_object(ton::create_tl_object(), true); + return envelope_send_query(std::move(b), [Self = actor_id(this)](td::Result res) -> void { + if (res.is_error()) { + return; + } else { + auto F = ton::fetch_tl_object(res.move_as_ok(), true); + if (F.is_error()) { + } else { + auto f = F.move_as_ok(); + auto blk_id = create_block_id(f->last_); + auto zstate_id = create_zero_state_id(f->init_); + td::actor::send_closure_later(Self, &TestNode::got_server_mc_block_id_silent, blk_id, zstate_id); + } + } + }); +} + +void TestNode::got_server_mc_block_id_silent(ton::BlockIdExt blkid, ton::ZeroStateIdExt zstateid) { + if (!zstate_id_.is_valid()) { + zstate_id_ = zstateid; + } else if (zstate_id_ != zstateid) { + stop(); + return; + } + register_blkid(blkid); + //register_blkid(zstateid); + if (!mc_last_id_.is_valid()) { + mc_last_id_ = blkid; + request_block(blkid); + // request_state(blkid); + } else if (mc_last_id_.id.seqno < blkid.id.seqno) { + mc_last_id_ = blkid; + } +} \ No newline at end of file diff --git 
a/ton-test-liteclient-full/lite-client-with-webserver/src/web_server/lite-client-test-node.cpp b/ton-test-liteclient-full/lite-client-with-webserver/src/web_server/lite-client-test-node.cpp new file mode 100644 index 0000000..c34b082 --- /dev/null +++ b/ton-test-liteclient-full/lite-client-with-webserver/src/web_server/lite-client-test-node.cpp @@ -0,0 +1,184 @@ +#include "adnl/adnl-ext-client.h" +#include "tl-utils/tl-utils.hpp" +#include "ton/ton-types.h" +#include "terminal/terminal.h" +#include "vm/cells.h" + +#include "server_http.hpp" + +#include +#include + +// Short alias for this namespace +namespace pt = boost::property_tree; + +using td::Ref; + +using HttpServer = SimpleWeb::Server; +using td::Ref; + +class TestNode : public td::actor::Actor { + private: + std::string local_config_ = "ton-local.config"; + std::string global_config_ = "ton-global.config"; + + td::actor::ActorOwn client_; + td::actor::ActorOwn io_; + + bool readline_enabled_ = true; + td::int32 liteserver_idx_ = -1; + + bool ready_ = false; + bool inited_ = false; + bool update_on_demand_enabled_ = true; + std::string db_root_; + + int server_time_ = 0; + int server_time_got_at_ = 0; + + ton::ZeroStateIdExt zstate_id_; + ton::BlockIdExt mc_last_id_; + + ton::BlockIdExt last_block_id_, last_state_id_; + td::BufferSlice last_block_data_, last_state_data_; + + std::string line_; + const char *parse_ptr_, *parse_end_; + td::Status error_; + + std::vector known_blk_ids_; + std::size_t shown_blk_ids_ = 0; + + std::unique_ptr make_callback(); + + void run_init_queries(); + bool get_server_time(); + bool get_server_mc_block_id(); + bool get_server_mc_block_id_silent(); + void got_server_mc_block_id(ton::BlockIdExt blkid, ton::ZeroStateIdExt zstateid); + void got_server_mc_block_id_silent(ton::BlockIdExt blkid, ton::ZeroStateIdExt zstateid); + bool request_block(ton::BlockIdExt blkid); + bool request_state(ton::BlockIdExt blkid); + void got_mc_block(ton::BlockIdExt blkid, td::BufferSlice data); + void got_mc_state(ton::BlockIdExt blkid, ton::RootHash root_hash, ton::FileHash file_hash, td::BufferSlice data); + td::Status send_set_verbosity(std::string verbosity); + td::Status send_ext_msg_from_filename(std::string filename); + td::Status save_db_file(ton::FileHash file_hash, td::BufferSlice data); + bool get_account_state(ton::WorkchainId workchain, ton::StdSmcAddress addr, ton::BlockIdExt ref_blkid); + void got_account_state(ton::BlockIdExt ref_blk, ton::BlockIdExt blk, ton::BlockIdExt shard_blk, + td::BufferSlice shard_proof, td::BufferSlice proof, td::BufferSlice state, + ton::WorkchainId workchain, ton::StdSmcAddress addr); + bool get_all_shards(bool use_last = true, ton::BlockIdExt blkid = {}); + void got_all_shards(ton::BlockIdExt blk, td::BufferSlice proof, td::BufferSlice data); + bool get_block(ton::BlockIdExt blk, bool dump = false); + void got_block(ton::BlockIdExt blkid, td::BufferSlice data, bool dump); + bool get_state(ton::BlockIdExt blk, bool dump = false); + void got_state(ton::BlockIdExt blkid, ton::RootHash root_hash, ton::FileHash file_hash, td::BufferSlice data, + bool dump); + bool get_block_header(ton::BlockIdExt blk, int mode); + void got_block_header(ton::BlockIdExt blkid, td::BufferSlice data, int mode); + bool show_block_header(ton::BlockIdExt blkid, Ref root, int mode); + bool show_state_header(ton::BlockIdExt blkid, Ref root, int mode); + bool get_one_transaction(ton::BlockIdExt blkid, ton::WorkchainId workchain, ton::StdSmcAddress addr, + ton::LogicalTime lt, bool dump = false); + void 
got_one_transaction(ton::BlockIdExt req_blkid, ton::BlockIdExt blkid, td::BufferSlice proof, + td::BufferSlice transaction, ton::WorkchainId workchain, ton::StdSmcAddress addr, + ton::LogicalTime trans_lt, bool dump); + bool get_last_transactions(ton::WorkchainId workchain, ton::StdSmcAddress addr, ton::LogicalTime lt, + ton::Bits256 hash, unsigned count, bool dump); + void got_last_transactions(std::vector blkids, td::BufferSlice transactions_boc, + ton::WorkchainId workchain, ton::StdSmcAddress addr, ton::LogicalTime lt, + ton::Bits256 hash, unsigned count, bool dump); + + bool do_parse_line(); + bool show_help(std::string command); + std::string get_word(char delim = ' '); + int skipspc(); + std::string get_line_tail(bool remove_spaces = true) const; + bool eoln() const; + bool seekeoln(); + bool set_error(td::Status error); + bool set_error(std::string err_msg); + void show_context() const; + bool parse_account_addr(ton::WorkchainId& wc, ton::StdSmcAddress& addr); + static int parse_hex_digit(int c); + static bool parse_hash(const char* str, ton::Bits256& hash); + static bool parse_uint64(std::string word, td::uint64& val); + bool parse_hash(ton::Bits256& hash); + bool parse_lt(ton::LogicalTime& lt); + bool parse_block_id_ext(ton::BlockIdExt& blkid, bool allow_incomplete = false); + bool parse_block_id_ext(std::string blk_id_string, ton::BlockIdExt& blkid, bool allow_incomplete = false) const; + bool register_blkid(const ton::BlockIdExt& blkid); + bool show_new_blkids(bool all = false); + bool complete_blkid(ton::BlockId partial_blkid, ton::BlockIdExt& complete_blkid) const; + + static bool parse_account_addr(std::string acc_string, ton::WorkchainId& wc, ton::StdSmcAddress& addr); + + public: + void conn_ready() { + LOG(ERROR) << "conn ready"; + ready_ = true; + if (!inited_) { + run_init_queries(); + } + } + void conn_closed() { + ready_ = false; + } + void set_local_config(std::string str) { + local_config_ = str; + } + void set_global_config(std::string str) { + global_config_ = str; + } + void set_db_root(std::string db_root) { + db_root_ = db_root; + } + void set_readline_enabled(bool value) { + readline_enabled_ = value; + } + void set_liteserver_idx(td::int32 idx) { + liteserver_idx_ = idx; + } + void set_update_on_demand(bool value) { + update_on_demand_enabled_ = value; + } + + void start_up() override { + } + void tear_down() override { + // FIXME: do not work in windows + //td::actor::SchedulerContext::get()->stop(); + } + + bool envelope_send_query(td::BufferSlice query, td::Promise promise); + bool envelope_send_web(td::BufferSlice query, td::Promise promise, std::shared_ptr response); + void parse_line(td::BufferSlice data); + + // web server methods + void get_server_time_web(std::shared_ptr response); + void get_account_state_web(std::string address, std::shared_ptr response); + void got_account_state_web(ton::BlockIdExt blk, ton::BlockIdExt shard_blk, td::BufferSlice shard_proof, + td::BufferSlice proof, td::BufferSlice state, ton::WorkchainId workchain, + ton::StdSmcAddress addr, std::shared_ptr response); + void get_block_web(std::string blkid_str, std::shared_ptr response, bool dump = true); + void got_block_web(ton::BlockIdExt blkid, td::BufferSlice data, bool dump, std::shared_ptr response); + bool give_block_header_description(std::ostringstream& out, ton::BlockIdExt blkid, Ref root, int mode); + + bool get_server_mc_block_id_web(std::shared_ptr response); + + TestNode() { + } + + void run(); + + // Web Server Methods + static void 
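+  // Threading note: the HTTP handlers and the updater run on their own
+  // std::threads, outside the actor scheduler, so every call into TestNode
+  // must be marshalled back through the scheduler (see run_web_server and
+  // run_updater). The hand-off used throughout this patch:
+  //
+  //   scheduler->run_in_context([&] {
+  //     td::actor::send_closure(x->get(), &TestNode::web_last);
+  //   });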
run_web_server(td::actor::Scheduler* scheduler, td::actor::ActorOwn* x); + static void web_error_response(std::shared_ptr response, std::string msg); + static void web_success_response(std::shared_ptr response, std::string msg); + static void web_success_response(std::shared_ptr response, pt::ptree root); + + void web_last(){ + get_server_mc_block_id_silent(); + } +}; \ No newline at end of file diff --git a/ton-test-liteclient-full/lite-client-with-webserver/src/web_server/lite-client-web-server.cpp b/ton-test-liteclient-full/lite-client-with-webserver/src/web_server/lite-client-web-server.cpp new file mode 100644 index 0000000..5d8b6e9 --- /dev/null +++ b/ton-test-liteclient-full/lite-client-with-webserver/src/web_server/lite-client-web-server.cpp @@ -0,0 +1,80 @@ +void TestNode::web_error_response(std::shared_ptr response, std::string msg) { + pt::ptree root; + std::ostringstream oss; + root.put("error", msg); + pt::write_json(oss, root); + response -> write(SimpleWeb::StatusCode::server_error_internal_server_error, + oss.str()); +} + +void TestNode::web_success_response(std::shared_ptr response, std::string msg) { + pt::ptree root; + std::ostringstream oss; + root.put("result", msg); + pt::write_json(oss, root); + response -> write(oss.str()); +} + +void TestNode::web_success_response(std::shared_ptr response, pt::ptree result) { + pt::ptree root; + std::ostringstream oss; + root.put_child("result", result); + pt::write_json(oss, root); + response -> write(oss.str()); +} + +void TestNode::run_web_server(td::actor::Scheduler* scheduler, td::actor::ActorOwn* x){ + HttpServer server; + server.config.port = 8000; + + // get a time + server.resource["^/time$"]["GET"] = [scheduler, x](std::shared_ptr response, + std::shared_ptr request) { + + std::thread work_thread([response, scheduler, x] { + scheduler -> run_in_context([&] { + td::actor::send_closure(x -> get(), &TestNode::get_server_time_web, response); + }); + }); + work_thread.detach(); + }; + // + + // get a account + server.resource["^/getaccount/(.+)$"]["GET"] = [scheduler, x](std::shared_ptr response, + std::shared_ptr request) { + std::string address = request -> path_match[1].str(); + + std::thread work_thread([response, scheduler, x, address] { + scheduler -> run_in_context([&] { + td::actor::send_closure(x -> get(), &TestNode::get_account_state_web, address, response); + }); + }); + work_thread.detach(); + }; + + // get a block + server.resource["^/getblock/(.+)$"]["GET"] = [scheduler, x](std::shared_ptr response, + std::shared_ptr request) { + std::string blkid_str = request -> path_match[1].str(); + std::thread work_thread([response, scheduler, x, blkid_str] { + scheduler -> run_in_context([&] { + td::actor::send_closure(x -> get(), &TestNode::get_block_web, blkid_str, response, true); + }); + }); + work_thread.detach(); + }; + + // get a last block + server.resource["^/last$"]["GET"] = [scheduler, x](std::shared_ptr response, + std::shared_ptr request) { + std::thread work_thread([response, scheduler, x] { + scheduler -> run_in_context([&] { + td::actor::send_closure(x -> get(), &TestNode::get_server_mc_block_id_web, response); + }); + }); + work_thread.detach(); + }; + + server.start(); +} \ No newline at end of file diff --git a/ton-test-liteclient-full/lite-client-with-webserver/src/web_server/server_http.hpp b/ton-test-liteclient-full/lite-client-with-webserver/src/web_server/server_http.hpp new file mode 100644 index 0000000..35fee64 --- /dev/null +++ 
b/ton-test-liteclient-full/lite-client-with-webserver/src/web_server/server_http.hpp @@ -0,0 +1,794 @@ +#ifndef SERVER_HTTP_HPP +#define SERVER_HTTP_HPP + +#include "utility.hpp" +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#ifdef USE_STANDALONE_ASIO +#include +#include +namespace SimpleWeb { + using error_code = std::error_code; + using errc = std::errc; + namespace make_error_code = std; +} // namespace SimpleWeb +#else +#include +#include +namespace SimpleWeb { + namespace asio = boost::asio; + using error_code = boost::system::error_code; + namespace errc = boost::system::errc; + namespace make_error_code = boost::system::errc; +} // namespace SimpleWeb +#endif + +// Late 2017 TODO: remove the following checks and always use std::regex +#ifdef USE_BOOST_REGEX +#include +namespace SimpleWeb { + namespace regex = boost; +} +#else +#include +namespace SimpleWeb { + namespace regex = std; +} +#endif + +namespace SimpleWeb { + template + class Server; + + template + class ServerBase { + protected: + class Session; + + public: + class Response : public std::enable_shared_from_this, public std::ostream { + friend class ServerBase; + friend class Server; + + std::unique_ptr streambuf = std::unique_ptr(new asio::streambuf()); + + std::shared_ptr session; + long timeout_content; + + asio::io_service::strand strand; + std::list, std::function>> send_queue; + + Response(std::shared_ptr session_, long timeout_content) noexcept : std::ostream(nullptr), session(std::move(session_)), timeout_content(timeout_content), strand(session->connection->socket->get_io_service()) { + rdbuf(streambuf.get()); + } + + template + void write_header(const CaseInsensitiveMultimap &header, size_type size) { + bool content_length_written = false; + bool chunked_transfer_encoding = false; + for(auto &field : header) { + if(!content_length_written && case_insensitive_equal(field.first, "content-length")) + content_length_written = true; + else if(!chunked_transfer_encoding && case_insensitive_equal(field.first, "transfer-encoding") && case_insensitive_equal(field.second, "chunked")) + chunked_transfer_encoding = true; + + *this << field.first << ": " << field.second << "\r\n"; + } + if(!content_length_written && !chunked_transfer_encoding && !close_connection_after_response) + *this << "Content-Length: " << size << "\r\n\r\n"; + else + *this << "\r\n"; + } + + void send_from_queue() { + auto self = this->shared_from_this(); + strand.post([self]() { + asio::async_write(*self->session->connection->socket, *self->send_queue.begin()->first, self->strand.wrap([self](const error_code &ec, std::size_t /*bytes_transferred*/) { + auto lock = self->session->connection->handler_runner->continue_lock(); + if(!lock) + return; + if(!ec) { + auto it = self->send_queue.begin(); + if(it->second) + it->second(ec); + self->send_queue.erase(it); + if(self->send_queue.size() > 0) + self->send_from_queue(); + } + else { + // All handlers in the queue is called with ec: + for(auto &pair : self->send_queue) { + if(pair.second) + pair.second(ec); + } + self->send_queue.clear(); + } + })); + }); + } + + void send_on_delete(const std::function &callback = nullptr) noexcept { + session->connection->set_timeout(timeout_content); + auto self = this->shared_from_this(); // Keep Response instance alive through the following async_write + asio::async_write(*session->connection->socket, *streambuf, [self, callback](const error_code &ec, std::size_t /*bytes_transferred*/) { + 
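+        // Concurrency note: send_queue plus the asio strand serialize all
+        // writes to one socket. Responses are enqueued under the strand and
+        // only the queue head is ever being written (send_from_queue above),
+        // so partial HTTP responses never interleave. The enqueue side, as in
+        // send() below:
+        //
+        //   strand.post([self, streambuf, callback]() {
+        //     self->send_queue.emplace_back(streambuf, callback);
+        //     if (self->send_queue.size() == 1)  // queue was idle: start draining
+        //       self->send_from_queue();
+        //   });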
self->session->connection->cancel_timeout(); + auto lock = self->session->connection->handler_runner->continue_lock(); + if(!lock) + return; + if(callback) + callback(ec); + }); + } + + public: + std::size_t size() noexcept { + return streambuf->size(); + } + + /// Use this function if you need to recursively send parts of a longer message, or when using server-sent events (SSE). + void send(const std::function &callback = nullptr) noexcept { + session->connection->set_timeout(timeout_content); + + std::shared_ptr streambuf = std::move(this->streambuf); + this->streambuf = std::unique_ptr(new asio::streambuf()); + rdbuf(this->streambuf.get()); + + auto self = this->shared_from_this(); + strand.post([self, streambuf, callback]() { + self->send_queue.emplace_back(streambuf, callback); + if(self->send_queue.size() == 1) + self->send_from_queue(); + }); + } + + /// Write directly to stream buffer using std::ostream::write + void write(const char_type *ptr, std::streamsize n) { + std::ostream::write(ptr, n); + } + + /// Convenience function for writing status line, potential header fields, and empty content + void write(StatusCode status_code = StatusCode::success_ok, const CaseInsensitiveMultimap &header = CaseInsensitiveMultimap()) { + *this << "HTTP/1.1 " << SimpleWeb::status_code(status_code) << "\r\n"; + write_header(header, 0); + } + + /// Convenience function for writing status line, header fields, and content + void write(StatusCode status_code, string_view content, const CaseInsensitiveMultimap &header = CaseInsensitiveMultimap()) { + *this << "HTTP/1.1 " << SimpleWeb::status_code(status_code) << "\r\n"; + write_header(header, content.size()); + if(!content.empty()) + *this << content; + } + + /// Convenience function for writing status line, header fields, and content + void write(StatusCode status_code, std::istream &content, const CaseInsensitiveMultimap &header = CaseInsensitiveMultimap()) { + *this << "HTTP/1.1 " << SimpleWeb::status_code(status_code) << "\r\n"; + content.seekg(0, std::ios::end); + auto size = content.tellg(); + content.seekg(0, std::ios::beg); + write_header(header, size); + if(size) + *this << content.rdbuf(); + } + + /// Convenience function for writing success status line, header fields, and content + void write(string_view content, const CaseInsensitiveMultimap &header = CaseInsensitiveMultimap()) { + write(StatusCode::success_ok, content, header); + } + + /// Convenience function for writing success status line, header fields, and content + void write(std::istream &content, const CaseInsensitiveMultimap &header = CaseInsensitiveMultimap()) { + write(StatusCode::success_ok, content, header); + } + + /// Convenience function for writing success status line, and header fields + void write(const CaseInsensitiveMultimap &header) { + write(StatusCode::success_ok, std::string(), header); + } + + /// If true, force server to close the connection after the response have been sent. + /// + /// This is useful when implementing a HTTP/1.0-server sending content + /// without specifying the content length. + bool close_connection_after_response = false; + }; + + class Content : public std::istream { + friend class ServerBase; + + public: + std::size_t size() noexcept { + return streambuf.size(); + } + /// Convenience function to return std::string. The stream buffer is consumed. + std::string string() noexcept { + try { + std::string str; + auto size = streambuf.size(); + str.resize(size); + read(&str[0], static_cast(size)); + return str; + } + catch(...) 
{ + return std::string(); + } + } + + private: + asio::streambuf &streambuf; + Content(asio::streambuf &streambuf) noexcept : std::istream(&streambuf), streambuf(streambuf) {} + }; + + class Request { + friend class ServerBase; + friend class Server; + friend class Session; + + asio::streambuf streambuf; + + Request(std::size_t max_request_streambuf_size, std::shared_ptr remote_endpoint_) noexcept + : streambuf(max_request_streambuf_size), content(streambuf), remote_endpoint(std::move(remote_endpoint_)) {} + + public: + std::string method, path, query_string, http_version; + + Content content; + + CaseInsensitiveMultimap header; + + regex::smatch path_match; + + std::shared_ptr remote_endpoint; + + /// The time point when the request header was fully read. + std::chrono::system_clock::time_point header_read_time; + + std::string remote_endpoint_address() const noexcept { + try { + return remote_endpoint->address().to_string(); + } + catch(...) { + return std::string(); + } + } + + unsigned short remote_endpoint_port() const noexcept { + return remote_endpoint->port(); + } + + /// Returns query keys with percent-decoded values. + CaseInsensitiveMultimap parse_query_string() const noexcept { + return SimpleWeb::QueryString::parse(query_string); + } + }; + + protected: + class Connection : public std::enable_shared_from_this { + public: + template + Connection(std::shared_ptr handler_runner_, Args &&... args) noexcept : handler_runner(std::move(handler_runner_)), socket(new socket_type(std::forward(args)...)) {} + + std::shared_ptr handler_runner; + + std::unique_ptr socket; // Socket must be unique_ptr since asio::ssl::stream is not movable + + std::unique_ptr timer; + + std::shared_ptr remote_endpoint; + + void close() noexcept { + error_code ec; + socket->lowest_layer().shutdown(asio::ip::tcp::socket::shutdown_both, ec); + socket->lowest_layer().cancel(ec); + } + + void set_timeout(long seconds) noexcept { + if(seconds == 0) { + timer = nullptr; + return; + } + + timer = std::unique_ptr(new asio::steady_timer(socket->get_io_service())); + timer->expires_from_now(std::chrono::seconds(seconds)); + auto self = this->shared_from_this(); + timer->async_wait([self](const error_code &ec) { + if(!ec) + self->close(); + }); + } + + void cancel_timeout() noexcept { + if(timer) { + error_code ec; + timer->cancel(ec); + } + } + }; + + class Session { + public: + Session(std::size_t max_request_streambuf_size, std::shared_ptr connection_) noexcept : connection(std::move(connection_)) { + if(!this->connection->remote_endpoint) { + error_code ec; + this->connection->remote_endpoint = std::make_shared(this->connection->socket->lowest_layer().remote_endpoint(ec)); + } + request = std::shared_ptr(new Request(max_request_streambuf_size, this->connection->remote_endpoint)); + } + + std::shared_ptr connection; + std::shared_ptr request; + }; + + public: + class Config { + friend class ServerBase; + + Config(unsigned short port) noexcept : port(port) {} + + public: + /// Port number to use. Defaults to 80 for HTTP and 443 for HTTPS. Set to 0 get an assigned port. + unsigned short port; + /// If io_service is not set, number of threads that the server will use when start() is called. + /// Defaults to 1 thread. + std::size_t thread_pool_size = 1; + /// Timeout on request handling. Defaults to 5 seconds. + long timeout_request = 5; + /// Timeout on content handling. Defaults to 300 seconds. + long timeout_content = 300; + /// Maximum size of request stream buffer. Defaults to architecture maximum. 
+ /// Reaching this limit will result in a message_size error code. + std::size_t max_request_streambuf_size = std::numeric_limits::max(); + /// IPv4 address in dotted decimal form or IPv6 address in hexadecimal notation. + /// If empty, the address will be any address. + std::string address; + /// Set to false to avoid binding the socket to an address that is already in use. Defaults to true. + bool reuse_address = true; + /// Make use of RFC 7413 or TCP Fast Open (TFO) + bool fast_open = false; + }; + /// Set before calling start(). + Config config; + + private: + class regex_orderable : public regex::regex { + public: + std::string str; + + regex_orderable(const char *regex_cstr) : regex::regex(regex_cstr), str(regex_cstr) {} + regex_orderable(std::string regex_str_) : regex::regex(regex_str_), str(std::move(regex_str_)) {} + bool operator<(const regex_orderable &rhs) const noexcept { + return str < rhs.str; + } + }; + + public: + /// Warning: do not add or remove resources after start() is called + std::map::Response>, std::shared_ptr::Request>)>>> resource; + + std::map::Response>, std::shared_ptr::Request>)>> default_resource; + + std::function::Request>, const error_code &)> on_error; + + std::function &, std::shared_ptr::Request>)> on_upgrade; + + /// If you have your own asio::io_service, store its pointer here before running start(). + std::shared_ptr io_service; + + /// If you know the server port in advance, use start() instead. + /// Returns assigned port. If io_service is not set, an internal io_service is created instead. + /// Call before accept_and_run(). + unsigned short bind() { + asio::ip::tcp::endpoint endpoint; + if(config.address.size() > 0) + endpoint = asio::ip::tcp::endpoint(asio::ip::address::from_string(config.address), config.port); + else + endpoint = asio::ip::tcp::endpoint(asio::ip::tcp::v6(), config.port); + + if(!io_service) { + io_service = std::make_shared(); + internal_io_service = true; + } + + if(!acceptor) + acceptor = std::unique_ptr(new asio::ip::tcp::acceptor(*io_service)); + acceptor->open(endpoint.protocol()); + acceptor->set_option(asio::socket_base::reuse_address(config.reuse_address)); + if(config.fast_open) { +#if defined(__linux__) && defined(TCP_FASTOPEN) + const int qlen = 5; // This seems to be the value that is used in other examples. + error_code ec; + acceptor->set_option(asio::detail::socket_option::integer(qlen), ec); +#endif // End Linux + } + acceptor->bind(endpoint); + + after_bind(); + + return acceptor->local_endpoint().port(); + } + + /// If you know the server port in advance, use start() instead. + /// Accept requests, and if io_service was not set before calling bind(), run the internal io_service instead. + /// Call after bind(). + void accept_and_run() { + acceptor->listen(); + accept(); + + if(internal_io_service) { + if(io_service->stopped()) + io_service->reset(); + + // If thread_pool_size>1, start m_io_service.run() in (thread_pool_size-1) threads for thread-pooling + threads.clear(); + for(std::size_t c = 1; c < config.thread_pool_size; c++) { + threads.emplace_back([this]() { + this->io_service->run(); + }); + } + + // Main thread + if(config.thread_pool_size > 0) + io_service->run(); + + // Wait for the rest of the threads, if any, to finish as well + for(auto &t : threads) + t.join(); + } + } + + /// Start the server by calling bind() and accept_and_run() + void start() { + bind(); + accept_and_run(); + } + + /// Stop accepting new requests, and close current connections. 
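+  /// A possible lifecycle (a sketch, not code from this patch): run start() on a
+  /// worker thread and call stop() from elsewhere to shut the server down:
+  ///
+  ///   SimpleWeb::Server<SimpleWeb::HTTP> server;          // hypothetical instance
+  ///   std::thread server_thread([&server] { server.start(); });
+  ///   // ...
+  ///   server.stop();          // closes the acceptor and open connections
+  ///   server_thread.join();   // start() returns once the io_service stops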
+ void stop() noexcept { + if(acceptor) { + error_code ec; + acceptor->close(ec); + + { + std::lock_guard lock(*connections_mutex); + for(auto &connection : *connections) + connection->close(); + connections->clear(); + } + + if(internal_io_service) + io_service->stop(); + } + } + + virtual ~ServerBase() noexcept { + handler_runner->stop(); + stop(); + } + + protected: + bool internal_io_service = false; + + std::unique_ptr acceptor; + std::vector threads; + + std::shared_ptr> connections; + std::shared_ptr connections_mutex; + + std::shared_ptr handler_runner; + + ServerBase(unsigned short port) noexcept : config(port), connections(new std::unordered_set()), connections_mutex(new std::mutex()), handler_runner(new ScopeRunner()) {} + + virtual void after_bind() {} + virtual void accept() = 0; + + template + std::shared_ptr create_connection(Args &&... args) noexcept { + auto connections = this->connections; + auto connections_mutex = this->connections_mutex; + auto connection = std::shared_ptr(new Connection(handler_runner, std::forward(args)...), [connections, connections_mutex](Connection *connection) { + { + std::lock_guard lock(*connections_mutex); + auto it = connections->find(connection); + if(it != connections->end()) + connections->erase(it); + } + delete connection; + }); + { + std::lock_guard lock(*connections_mutex); + connections->emplace(connection.get()); + } + return connection; + } + + void read(const std::shared_ptr &session) { + session->connection->set_timeout(config.timeout_request); + asio::async_read_until(*session->connection->socket, session->request->streambuf, "\r\n\r\n", [this, session](const error_code &ec, std::size_t bytes_transferred) { + session->connection->cancel_timeout(); + auto lock = session->connection->handler_runner->continue_lock(); + if(!lock) + return; + session->request->header_read_time = std::chrono::system_clock::now(); + if((!ec || ec == asio::error::not_found) && session->request->streambuf.size() == session->request->streambuf.max_size()) { + auto response = std::shared_ptr(new Response(session, this->config.timeout_content)); + response->write(StatusCode::client_error_payload_too_large); + if(this->on_error) + this->on_error(session->request, make_error_code::make_error_code(errc::message_size)); + return; + } + if(!ec) { + // request->streambuf.size() is not necessarily the same as bytes_transferred, from Boost-docs: + // "After a successful async_read_until operation, the streambuf may contain additional data beyond the delimiter" + // The chosen solution is to extract lines from the stream directly when parsing the header. What is left of the + // streambuf (maybe some bytes of the content) is appended to in the async_read-function below (for retrieving content). 
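+          // Illustrative numbers (not from the code): if the client sent a 100-byte
+          // header (up to and including "\r\n\r\n") together with the first 20 bytes
+          // of the body, streambuf.size() is 120 while bytes_transferred is 100, so
+          // num_additional_bytes below is 20 and only content_length - 20 bytes
+          // still need to be read from the socket.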
+ std::size_t num_additional_bytes = session->request->streambuf.size() - bytes_transferred; + + if(!RequestMessage::parse(session->request->content, session->request->method, session->request->path, + session->request->query_string, session->request->http_version, session->request->header)) { + if(this->on_error) + this->on_error(session->request, make_error_code::make_error_code(errc::protocol_error)); + return; + } + + // If content, read that as well + auto header_it = session->request->header.find("Content-Length"); + if(header_it != session->request->header.end()) { + unsigned long long content_length = 0; + try { + content_length = stoull(header_it->second); + } + catch(const std::exception &) { + if(this->on_error) + this->on_error(session->request, make_error_code::make_error_code(errc::protocol_error)); + return; + } + if(content_length > num_additional_bytes) { + session->connection->set_timeout(config.timeout_content); + asio::async_read(*session->connection->socket, session->request->streambuf, asio::transfer_exactly(content_length - num_additional_bytes), [this, session](const error_code &ec, std::size_t /*bytes_transferred*/) { + session->connection->cancel_timeout(); + auto lock = session->connection->handler_runner->continue_lock(); + if(!lock) + return; + if(!ec) { + if(session->request->streambuf.size() == session->request->streambuf.max_size()) { + auto response = std::shared_ptr(new Response(session, this->config.timeout_content)); + response->write(StatusCode::client_error_payload_too_large); + if(this->on_error) + this->on_error(session->request, make_error_code::make_error_code(errc::message_size)); + return; + } + this->find_resource(session); + } + else if(this->on_error) + this->on_error(session->request, ec); + }); + } + else + this->find_resource(session); + } + else if((header_it = session->request->header.find("Transfer-Encoding")) != session->request->header.end() && header_it->second == "chunked") { + auto chunks_streambuf = std::make_shared(this->config.max_request_streambuf_size); + this->read_chunked_transfer_encoded(session, chunks_streambuf); + } + else + this->find_resource(session); + } + else if(this->on_error) + this->on_error(session->request, ec); + }); + } + + void read_chunked_transfer_encoded(const std::shared_ptr &session, const std::shared_ptr &chunks_streambuf) { + session->connection->set_timeout(config.timeout_content); + asio::async_read_until(*session->connection->socket, session->request->streambuf, "\r\n", [this, session, chunks_streambuf](const error_code &ec, size_t bytes_transferred) { + session->connection->cancel_timeout(); + auto lock = session->connection->handler_runner->continue_lock(); + if(!lock) + return; + if((!ec || ec == asio::error::not_found) && session->request->streambuf.size() == session->request->streambuf.max_size()) { + auto response = std::shared_ptr(new Response(session, this->config.timeout_content)); + response->write(StatusCode::client_error_payload_too_large); + if(this->on_error) + this->on_error(session->request, make_error_code::make_error_code(errc::message_size)); + return; + } + if(!ec) { + std::string line; + getline(session->request->content, line); + bytes_transferred -= line.size() + 1; + line.pop_back(); + unsigned long length = 0; + try { + length = stoul(line, 0, 16); + } + catch(...) 
{ + if(this->on_error) + this->on_error(session->request, make_error_code::make_error_code(errc::protocol_error)); + return; + } + + auto num_additional_bytes = session->request->streambuf.size() - bytes_transferred; + + if((2 + length) > num_additional_bytes) { + session->connection->set_timeout(config.timeout_content); + asio::async_read(*session->connection->socket, session->request->streambuf, asio::transfer_exactly(2 + length - num_additional_bytes), [this, session, chunks_streambuf, length](const error_code &ec, size_t /*bytes_transferred*/) { + session->connection->cancel_timeout(); + auto lock = session->connection->handler_runner->continue_lock(); + if(!lock) + return; + if(!ec) { + if(session->request->streambuf.size() == session->request->streambuf.max_size()) { + auto response = std::shared_ptr(new Response(session, this->config.timeout_content)); + response->write(StatusCode::client_error_payload_too_large); + if(this->on_error) + this->on_error(session->request, make_error_code::make_error_code(errc::message_size)); + return; + } + this->read_chunked_transfer_encoded_chunk(session, chunks_streambuf, length); + } + else if(this->on_error) + this->on_error(session->request, ec); + }); + } + else + this->read_chunked_transfer_encoded_chunk(session, chunks_streambuf, length); + } + else if(this->on_error) + this->on_error(session->request, ec); + }); + } + + void read_chunked_transfer_encoded_chunk(const std::shared_ptr &session, const std::shared_ptr &chunks_streambuf, unsigned long length) { + std::ostream tmp_stream(chunks_streambuf.get()); + if(length > 0) { + std::unique_ptr buffer(new char[length]); + session->request->content.read(buffer.get(), static_cast(length)); + tmp_stream.write(buffer.get(), static_cast(length)); + if(chunks_streambuf->size() == chunks_streambuf->max_size()) { + auto response = std::shared_ptr(new Response(session, this->config.timeout_content)); + response->write(StatusCode::client_error_payload_too_large); + if(this->on_error) + this->on_error(session->request, make_error_code::make_error_code(errc::message_size)); + return; + } + } + + // Remove "\r\n" + session->request->content.get(); + session->request->content.get(); + + if(length > 0) + read_chunked_transfer_encoded(session, chunks_streambuf); + else { + if(chunks_streambuf->size() > 0) { + std::ostream ostream(&session->request->streambuf); + ostream << chunks_streambuf.get(); + } + this->find_resource(session); + } + } + + void find_resource(const std::shared_ptr &session) { + // Upgrade connection + if(on_upgrade) { + auto it = session->request->header.find("Upgrade"); + if(it != session->request->header.end()) { + // remove connection from connections + { + std::lock_guard lock(*connections_mutex); + auto it = connections->find(session->connection.get()); + if(it != connections->end()) + connections->erase(it); + } + + on_upgrade(session->connection->socket, session->request); + return; + } + } + // Find path- and method-match, and call write + for(auto ®ex_method : resource) { + auto it = regex_method.second.find(session->request->method); + if(it != regex_method.second.end()) { + regex::smatch sm_res; + if(regex::regex_match(session->request->path, sm_res, regex_method.first)) { + session->request->path_match = std::move(sm_res); + write(session, it->second); + return; + } + } + } + auto it = default_resource.find(session->request->method); + if(it != default_resource.end()) + write(session, it->second); + } + + void write(const std::shared_ptr &session, + std::function::Response>, 
std::shared_ptr::Request>)> &resource_function) { + session->connection->set_timeout(config.timeout_content); + auto response = std::shared_ptr(new Response(session, config.timeout_content), [this](Response *response_ptr) { + auto response = std::shared_ptr(response_ptr); + response->send_on_delete([this, response](const error_code &ec) { + if(!ec) { + if(response->close_connection_after_response) + return; + + auto range = response->session->request->header.equal_range("Connection"); + for(auto it = range.first; it != range.second; it++) { + if(case_insensitive_equal(it->second, "close")) + return; + else if(case_insensitive_equal(it->second, "keep-alive")) { + auto new_session = std::make_shared(this->config.max_request_streambuf_size, response->session->connection); + this->read(new_session); + return; + } + } + if(response->session->request->http_version >= "1.1") { + auto new_session = std::make_shared(this->config.max_request_streambuf_size, response->session->connection); + this->read(new_session); + return; + } + } + else if(this->on_error) + this->on_error(response->session->request, ec); + }); + }); + + try { + resource_function(response, session->request); + } + catch(const std::exception &) { + if(on_error) + on_error(session->request, make_error_code::make_error_code(errc::operation_canceled)); + return; + } + } + }; + + template + class Server : public ServerBase {}; + + using HTTP = asio::ip::tcp::socket; + + template <> + class Server : public ServerBase { + public: + Server() noexcept : ServerBase::ServerBase(80) {} + + protected: + void accept() override { + auto connection = create_connection(*io_service); + + acceptor->async_accept(*connection->socket, [this, connection](const error_code &ec) { + auto lock = connection->handler_runner->continue_lock(); + if(!lock) + return; + + // Immediately start accepting a new connection (unless io_service has been stopped) + if(ec != asio::error::operation_aborted) + this->accept(); + + auto session = std::make_shared(config.max_request_streambuf_size, connection); + + if(!ec) { + asio::ip::tcp::no_delay option(true); + error_code ec; + session->connection->socket->set_option(option, ec); + + this->read(session); + } + else if(this->on_error) + this->on_error(session->request, ec); + }); + } + }; +} // namespace SimpleWeb + +#endif /* SERVER_HTTP_HPP */ \ No newline at end of file diff --git a/ton-test-liteclient-full/lite-client-with-webserver/src/web_server/status_code.hpp b/ton-test-liteclient-full/lite-client-with-webserver/src/web_server/status_code.hpp new file mode 100644 index 0000000..1bba6a3 --- /dev/null +++ b/ton-test-liteclient-full/lite-client-with-webserver/src/web_server/status_code.hpp @@ -0,0 +1,176 @@ +#ifndef SIMPLE_WEB_STATUS_CODE_HPP +#define SIMPLE_WEB_STATUS_CODE_HPP + +#include +#include +#include +#include +#include + +namespace SimpleWeb { + enum class StatusCode { + unknown = 0, + information_continue = 100, + information_switching_protocols, + information_processing, + success_ok = 200, + success_created, + success_accepted, + success_non_authoritative_information, + success_no_content, + success_reset_content, + success_partial_content, + success_multi_status, + success_already_reported, + success_im_used = 226, + redirection_multiple_choices = 300, + redirection_moved_permanently, + redirection_found, + redirection_see_other, + redirection_not_modified, + redirection_use_proxy, + redirection_switch_proxy, + redirection_temporary_redirect, + redirection_permanent_redirect, + client_error_bad_request = 
400, + client_error_unauthorized, + client_error_payment_required, + client_error_forbidden, + client_error_not_found, + client_error_method_not_allowed, + client_error_not_acceptable, + client_error_proxy_authentication_required, + client_error_request_timeout, + client_error_conflict, + client_error_gone, + client_error_length_required, + client_error_precondition_failed, + client_error_payload_too_large, + client_error_uri_too_long, + client_error_unsupported_media_type, + client_error_range_not_satisfiable, + client_error_expectation_failed, + client_error_im_a_teapot, + client_error_misdirection_required = 421, + client_error_unprocessable_entity, + client_error_locked, + client_error_failed_dependency, + client_error_upgrade_required = 426, + client_error_precondition_required = 428, + client_error_too_many_requests, + client_error_request_header_fields_too_large = 431, + client_error_unavailable_for_legal_reasons = 451, + server_error_internal_server_error = 500, + server_error_not_implemented, + server_error_bad_gateway, + server_error_service_unavailable, + server_error_gateway_timeout, + server_error_http_version_not_supported, + server_error_variant_also_negotiates, + server_error_insufficient_storage, + server_error_loop_detected, + server_error_not_extended = 510, + server_error_network_authentication_required + }; + + inline const std::map &status_code_strings() { + static const std::map status_code_strings = { + {StatusCode::unknown, ""}, + {StatusCode::information_continue, "100 Continue"}, + {StatusCode::information_switching_protocols, "101 Switching Protocols"}, + {StatusCode::information_processing, "102 Processing"}, + {StatusCode::success_ok, "200 OK"}, + {StatusCode::success_created, "201 Created"}, + {StatusCode::success_accepted, "202 Accepted"}, + {StatusCode::success_non_authoritative_information, "203 Non-Authoritative Information"}, + {StatusCode::success_no_content, "204 No Content"}, + {StatusCode::success_reset_content, "205 Reset Content"}, + {StatusCode::success_partial_content, "206 Partial Content"}, + {StatusCode::success_multi_status, "207 Multi-Status"}, + {StatusCode::success_already_reported, "208 Already Reported"}, + {StatusCode::success_im_used, "226 IM Used"}, + {StatusCode::redirection_multiple_choices, "300 Multiple Choices"}, + {StatusCode::redirection_moved_permanently, "301 Moved Permanently"}, + {StatusCode::redirection_found, "302 Found"}, + {StatusCode::redirection_see_other, "303 See Other"}, + {StatusCode::redirection_not_modified, "304 Not Modified"}, + {StatusCode::redirection_use_proxy, "305 Use Proxy"}, + {StatusCode::redirection_switch_proxy, "306 Switch Proxy"}, + {StatusCode::redirection_temporary_redirect, "307 Temporary Redirect"}, + {StatusCode::redirection_permanent_redirect, "308 Permanent Redirect"}, + {StatusCode::client_error_bad_request, "400 Bad Request"}, + {StatusCode::client_error_unauthorized, "401 Unauthorized"}, + {StatusCode::client_error_payment_required, "402 Payment Required"}, + {StatusCode::client_error_forbidden, "403 Forbidden"}, + {StatusCode::client_error_not_found, "404 Not Found"}, + {StatusCode::client_error_method_not_allowed, "405 Method Not Allowed"}, + {StatusCode::client_error_not_acceptable, "406 Not Acceptable"}, + {StatusCode::client_error_proxy_authentication_required, "407 Proxy Authentication Required"}, + {StatusCode::client_error_request_timeout, "408 Request Timeout"}, + {StatusCode::client_error_conflict, "409 Conflict"}, + {StatusCode::client_error_gone, "410 Gone"}, + 
{StatusCode::client_error_length_required, "411 Length Required"}, + {StatusCode::client_error_precondition_failed, "412 Precondition Failed"}, + {StatusCode::client_error_payload_too_large, "413 Payload Too Large"}, + {StatusCode::client_error_uri_too_long, "414 URI Too Long"}, + {StatusCode::client_error_unsupported_media_type, "415 Unsupported Media Type"}, + {StatusCode::client_error_range_not_satisfiable, "416 Range Not Satisfiable"}, + {StatusCode::client_error_expectation_failed, "417 Expectation Failed"}, + {StatusCode::client_error_im_a_teapot, "418 I'm a teapot"}, + {StatusCode::client_error_misdirection_required, "421 Misdirected Request"}, + {StatusCode::client_error_unprocessable_entity, "422 Unprocessable Entity"}, + {StatusCode::client_error_locked, "423 Locked"}, + {StatusCode::client_error_failed_dependency, "424 Failed Dependency"}, + {StatusCode::client_error_upgrade_required, "426 Upgrade Required"}, + {StatusCode::client_error_precondition_required, "428 Precondition Required"}, + {StatusCode::client_error_too_many_requests, "429 Too Many Requests"}, + {StatusCode::client_error_request_header_fields_too_large, "431 Request Header Fields Too Large"}, + {StatusCode::client_error_unavailable_for_legal_reasons, "451 Unavailable For Legal Reasons"}, + {StatusCode::server_error_internal_server_error, "500 Internal Server Error"}, + {StatusCode::server_error_not_implemented, "501 Not Implemented"}, + {StatusCode::server_error_bad_gateway, "502 Bad Gateway"}, + {StatusCode::server_error_service_unavailable, "503 Service Unavailable"}, + {StatusCode::server_error_gateway_timeout, "504 Gateway Timeout"}, + {StatusCode::server_error_http_version_not_supported, "505 HTTP Version Not Supported"}, + {StatusCode::server_error_variant_also_negotiates, "506 Variant Also Negotiates"}, + {StatusCode::server_error_insufficient_storage, "507 Insufficient Storage"}, + {StatusCode::server_error_loop_detected, "508 Loop Detected"}, + {StatusCode::server_error_not_extended, "510 Not Extended"}, + {StatusCode::server_error_network_authentication_required, "511 Network Authentication Required"}}; + return status_code_strings; + } + + inline StatusCode status_code(const std::string &status_code_string) noexcept { + if(status_code_string.size() < 3) + return StatusCode::unknown; + + auto number = status_code_string.substr(0, 3); + if(number[0] < '0' || number[0] > '9' || number[1] < '0' || number[1] > '9' || number[2] < '0' || number[2] > '9') + return StatusCode::unknown; + + class StringToStatusCode : public std::unordered_map { + public: + StringToStatusCode() { + for(auto &status_code : status_code_strings()) + emplace(status_code.second.substr(0, 3), status_code.first); + } + }; + static StringToStatusCode string_to_status_code; + + auto pos = string_to_status_code.find(number); + if(pos == string_to_status_code.end()) + return static_cast(atoi(number.c_str())); + return pos->second; + } + + inline const std::string &status_code(StatusCode status_code_enum) noexcept { + auto pos = status_code_strings().find(status_code_enum); + if(pos == status_code_strings().end()) { + static std::string empty_string; + return empty_string; + } + return pos->second; + } +} // namespace SimpleWeb + +#endif // SIMPLE_WEB_STATUS_CODE_HPP \ No newline at end of file diff --git a/ton-test-liteclient-full/lite-client-with-webserver/src/web_server/utility.hpp b/ton-test-liteclient-full/lite-client-with-webserver/src/web_server/utility.hpp new file mode 100644 index 0000000..6e8586a --- /dev/null +++ 
b/ton-test-liteclient-full/lite-client-with-webserver/src/web_server/utility.hpp
@@ -0,0 +1,363 @@
+#ifndef SIMPLE_WEB_UTILITY_HPP
+#define SIMPLE_WEB_UTILITY_HPP
+
+#include "status_code.hpp"
+#include <atomic>
+#include <cstdlib>
+#include <iostream>
+#include <memory>
+#include <string>
+#include <unordered_map>
+
+#if __cplusplus > 201402L || _MSVC_LANG > 201402L
+#include <string_view>
+namespace SimpleWeb {
+  using string_view = std::string_view;
+}
+#elif !defined(USE_STANDALONE_ASIO)
+#include <boost/utility/string_ref.hpp>
+namespace SimpleWeb {
+  using string_view = boost::string_ref;
+}
+#else
+namespace SimpleWeb {
+  using string_view = const std::string &;
+}
+#endif
+
+namespace SimpleWeb {
+  inline bool case_insensitive_equal(const std::string &str1, const std::string &str2) noexcept {
+    return str1.size() == str2.size() &&
+           std::equal(str1.begin(), str1.end(), str2.begin(), [](char a, char b) {
+             return tolower(a) == tolower(b);
+           });
+  }
+  class CaseInsensitiveEqual {
+  public:
+    bool operator()(const std::string &str1, const std::string &str2) const noexcept {
+      return case_insensitive_equal(str1, str2);
+    }
+  };
+  // Based on https://stackoverflow.com/questions/2590677/how-do-i-combine-hash-values-in-c0x/2595226#2595226
+  class CaseInsensitiveHash {
+  public:
+    std::size_t operator()(const std::string &str) const noexcept {
+      std::size_t h = 0;
+      std::hash<int> hash;
+      for(auto c : str)
+        h ^= hash(tolower(c)) + 0x9e3779b9 + (h << 6) + (h >> 2);
+      return h;
+    }
+  };
+
+  using CaseInsensitiveMultimap = std::unordered_multimap<std::string, std::string, CaseInsensitiveHash, CaseInsensitiveEqual>;
+
+  /// Percent encoding and decoding
+  class Percent {
+  public:
+    /// Returns percent-encoded string
+    static std::string encode(const std::string &value) noexcept {
+      static auto hex_chars = "0123456789ABCDEF";
+
+      std::string result;
+      result.reserve(value.size()); // Minimum size of result
+
+      for(auto &chr : value) {
+        if(!((chr >= '0' && chr <= '9') || (chr >= 'A' && chr <= 'Z') || (chr >= 'a' && chr <= 'z') || chr == '-' || chr == '.' || chr == '_' || chr == '~'))
+          result += std::string("%") + hex_chars[static_cast<unsigned char>(chr) >> 4] + hex_chars[static_cast<unsigned char>(chr) & 15];
+        else
+          result += chr;
+      }
+
+      return result;
+    }
+
+    /// Returns percent-decoded string
+    static std::string decode(const std::string &value) noexcept {
+      std::string result;
+      result.reserve(value.size() / 3 + (value.size() % 3)); // Minimum size of result
+
+      for(std::size_t i = 0; i < value.size(); ++i) {
+        auto &chr = value[i];
+        if(chr == '%' && i + 2 < value.size()) {
+          auto hex = value.substr(i + 1, 2);
+          auto decoded_chr = static_cast<char>(std::strtol(hex.c_str(), nullptr, 16));
+          result += decoded_chr;
+          i += 2;
+        }
+        else if(chr == '+')
+          result += ' ';
+        else
+          result += chr;
+      }
+
+      return result;
+    }
+  };
+
+  /// Query string creation and parsing
+  class QueryString {
+  public:
+    /// Returns query string created from given field names and values
+    static std::string create(const CaseInsensitiveMultimap &fields) noexcept {
+      std::string result;
+
+      bool first = true;
+      for(auto &field : fields) {
+        result += (!first ? "&" : "") + field.first + '=' + Percent::encode(field.second);
+        first = false;
+      }
+
+      return result;
+    }
+
+    /// Returns query keys with percent-decoded values.
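+    /// For example (expected behaviour, illustrative input only):
+    ///
+    ///   auto fields = SimpleWeb::QueryString::parse("name=John%20Doe&sort=asc");
+    ///   // fields now contains {"name", "John Doe"} and {"sort", "asc"}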
+ static CaseInsensitiveMultimap parse(const std::string &query_string) noexcept { + CaseInsensitiveMultimap result; + + if(query_string.empty()) + return result; + + std::size_t name_pos = 0; + auto name_end_pos = std::string::npos; + auto value_pos = std::string::npos; + for(std::size_t c = 0; c < query_string.size(); ++c) { + if(query_string[c] == '&') { + auto name = query_string.substr(name_pos, (name_end_pos == std::string::npos ? c : name_end_pos) - name_pos); + if(!name.empty()) { + auto value = value_pos == std::string::npos ? std::string() : query_string.substr(value_pos, c - value_pos); + result.emplace(std::move(name), Percent::decode(value)); + } + name_pos = c + 1; + name_end_pos = std::string::npos; + value_pos = std::string::npos; + } + else if(query_string[c] == '=') { + name_end_pos = c; + value_pos = c + 1; + } + } + if(name_pos < query_string.size()) { + auto name = query_string.substr(name_pos, name_end_pos - name_pos); + if(!name.empty()) { + auto value = value_pos >= query_string.size() ? std::string() : query_string.substr(value_pos); + result.emplace(std::move(name), Percent::decode(value)); + } + } + + return result; + } + }; + + class HttpHeader { + public: + /// Parse header fields + static CaseInsensitiveMultimap parse(std::istream &stream) noexcept { + CaseInsensitiveMultimap result; + std::string line; + std::size_t param_end; + while(getline(stream, line) && (param_end = line.find(':')) != std::string::npos) { + std::size_t value_start = param_end + 1; + while(value_start + 1 < line.size() && line[value_start] == ' ') + ++value_start; + if(value_start < line.size()) + result.emplace(line.substr(0, param_end), line.substr(value_start, line.size() - value_start - (line.back() == '\r' ? 1 : 0))); + } + return result; + } + + class FieldValue { + public: + class SemicolonSeparatedAttributes { + public: + /// Parse Set-Cookie or Content-Disposition header field value. Attribute values are percent-decoded. 
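+        /// For example (expected behaviour, illustrative input only):
+        ///
+        ///   auto attr = HttpHeader::FieldValue::SemicolonSeparatedAttributes::parse(
+        ///       "form-data; name=\"file\"; filename=\"a%20b.txt\"");
+        ///   // attr contains {"form-data", ""}, {"name", "file"}, {"filename", "a b.txt"}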
+ static CaseInsensitiveMultimap parse(const std::string &str) { + CaseInsensitiveMultimap result; + + std::size_t name_start_pos = std::string::npos; + std::size_t name_end_pos = std::string::npos; + std::size_t value_start_pos = std::string::npos; + for(std::size_t c = 0; c < str.size(); ++c) { + if(name_start_pos == std::string::npos) { + if(str[c] != ' ' && str[c] != ';') + name_start_pos = c; + } + else { + if(name_end_pos == std::string::npos) { + if(str[c] == ';') { + result.emplace(str.substr(name_start_pos, c - name_start_pos), std::string()); + name_start_pos = std::string::npos; + } + else if(str[c] == '=') + name_end_pos = c; + } + else { + if(value_start_pos == std::string::npos) { + if(str[c] == '"' && c + 1 < str.size()) + value_start_pos = c + 1; + else + value_start_pos = c; + } + else if(str[c] == '"' || str[c] == ';') { + result.emplace(str.substr(name_start_pos, name_end_pos - name_start_pos), Percent::decode(str.substr(value_start_pos, c - value_start_pos))); + name_start_pos = std::string::npos; + name_end_pos = std::string::npos; + value_start_pos = std::string::npos; + } + } + } + } + if(name_start_pos != std::string::npos) { + if(name_end_pos == std::string::npos) + result.emplace(str.substr(name_start_pos), std::string()); + else if(value_start_pos != std::string::npos) { + if(str.back() == '"') + result.emplace(str.substr(name_start_pos, name_end_pos - name_start_pos), Percent::decode(str.substr(value_start_pos, str.size() - 1))); + else + result.emplace(str.substr(name_start_pos, name_end_pos - name_start_pos), Percent::decode(str.substr(value_start_pos))); + } + } + + return result; + } + }; + }; + }; // namespace SimpleWeb + + class RequestMessage { + public: + /// Parse request line and header fields + static bool parse(std::istream &stream, std::string &method, std::string &path, std::string &query_string, std::string &version, CaseInsensitiveMultimap &header) noexcept { + std::string line; + std::size_t method_end; + if(getline(stream, line) && (method_end = line.find(' ')) != std::string::npos) { + method = line.substr(0, method_end); + + std::size_t query_start = std::string::npos; + std::size_t path_and_query_string_end = std::string::npos; + for(std::size_t i = method_end + 1; i < line.size(); ++i) { + if(line[i] == '?' 
&& (i + 1) < line.size()) + query_start = i + 1; + else if(line[i] == ' ') { + path_and_query_string_end = i; + break; + } + } + if(path_and_query_string_end != std::string::npos) { + if(query_start != std::string::npos) { + path = line.substr(method_end + 1, query_start - method_end - 2); + query_string = line.substr(query_start, path_and_query_string_end - query_start); + } + else + path = line.substr(method_end + 1, path_and_query_string_end - method_end - 1); + + std::size_t protocol_end; + if((protocol_end = line.find('/', path_and_query_string_end + 1)) != std::string::npos) { + if(line.compare(path_and_query_string_end + 1, protocol_end - path_and_query_string_end - 1, "HTTP") != 0) + return false; + version = line.substr(protocol_end + 1, line.size() - protocol_end - 2); + } + else + return false; + + header = HttpHeader::parse(stream); + } + else + return false; + } + else + return false; + return true; + } + }; + + class ResponseMessage { + public: + /// Parse status line and header fields + static bool parse(std::istream &stream, std::string &version, std::string &status_code, CaseInsensitiveMultimap &header) noexcept { + std::string line; + std::size_t version_end; + if(getline(stream, line) && (version_end = line.find(' ')) != std::string::npos) { + if(5 < line.size()) + version = line.substr(5, version_end - 5); + else + return false; + if((version_end + 1) < line.size()) + status_code = line.substr(version_end + 1, line.size() - (version_end + 1) - 1); + else + return false; + + header = HttpHeader::parse(stream); + } + else + return false; + return true; + } + }; +} // namespace SimpleWeb + +#ifdef __SSE2__ +#include +namespace SimpleWeb { + inline void spin_loop_pause() noexcept { _mm_pause(); } +} // namespace SimpleWeb +// TODO: need verification that the following checks are correct: +#elif defined(_MSC_VER) && _MSC_VER >= 1800 && (defined(_M_X64) || defined(_M_IX86)) +#include +namespace SimpleWeb { + inline void spin_loop_pause() noexcept { _mm_pause(); } +} // namespace SimpleWeb +#else +namespace SimpleWeb { + inline void spin_loop_pause() noexcept {} +} // namespace SimpleWeb +#endif + +namespace SimpleWeb { + /// Makes it possible to for instance cancel Asio handlers without stopping asio::io_service + class ScopeRunner { + /// Scope count that is set to -1 if scopes are to be canceled + std::atomic count; + + public: + class SharedLock { + friend class ScopeRunner; + std::atomic &count; + SharedLock(std::atomic &count) noexcept : count(count) {} + SharedLock &operator=(const SharedLock &) = delete; + SharedLock(const SharedLock &) = delete; + + public: + ~SharedLock() noexcept { + count.fetch_sub(1); + } + }; + + ScopeRunner() noexcept : count(0) {} + + /// Returns nullptr if scope should be exited, or a shared lock otherwise + std::unique_ptr continue_lock() noexcept { + long expected = count; + while(expected >= 0 && !count.compare_exchange_weak(expected, expected + 1)) + spin_loop_pause(); + + if(expected < 0) + return nullptr; + else + return std::unique_ptr(new SharedLock(count)); + } + + /// Blocks until all shared locks are released, then prevents future shared locks + void stop() noexcept { + long expected = 0; + while(!count.compare_exchange_weak(expected, -1)) { + if(expected < 0) + return; + expected = 0; + spin_loop_pause(); + } + } + }; +} // namespace SimpleWeb + +#endif // SIMPLE_WEB_UTILITY_HPP \ No newline at end of file
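
Usage note (not part of the patch): the headers above are a bundled copy of the Simple-Web-Server library, so the webserver side of the lite client is driven entirely through SimpleWeb::Server<SimpleWeb::HTTP>. A minimal, self-contained sketch of that API follows; the port, the "^/hello$" resource, and the handler bodies are illustrative assumptions, not code from this repository:

    // Sketch only: assumes server_http.hpp is on the include path.
    #include "server_http.hpp"

    #include <memory>
    #include <thread>

    using HttpServer = SimpleWeb::Server<SimpleWeb::HTTP>;

    int main() {
      HttpServer server; // default-constructed with port 80
      server.config.port = 8080;

      // Handlers are selected by regex match on the path, then by HTTP method.
      server.resource["^/hello$"]["GET"] = [](std::shared_ptr<HttpServer::Response> response,
                                              std::shared_ptr<HttpServer::Request> request) {
        response->write("Hello, " + request->remote_endpoint_address());
      };

      // Fallback when no resource regex matches.
      server.default_resource["GET"] = [](std::shared_ptr<HttpServer::Response> response,
                                          std::shared_ptr<HttpServer::Request> /*request*/) {
        response->write(SimpleWeb::StatusCode::client_error_not_found, "Not Found");
      };

      std::thread server_thread([&server] { server.start(); });
      // ... run until shutdown is requested ...
      server.stop();
      server_thread.join();
    }

Because Response publicly derives from std::ostream, a handler may also stream into it (*response << ...) and simply return; the shared_ptr deleter installed in ServerBase::write() then sends the buffered data and, depending on the Connection header and HTTP version, starts a new Session for keep-alive.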