Compare commits


12 commits

Author SHA1 Message Date
Brannon King
84333407bb sketch a method to return claim activations in a given block
it doesn't link properly
2019-06-04 10:27:44 -06:00
Brannon King
8ad7903aba made cache match legacy_master, removed my bad assert in undo 2019-05-28 15:18:50 -06:00
Brannon King
cc789fc517 added claimtrie field back to getblocktemplate
I also included a test to ensure that we don't forget it next time
2019-05-24 10:32:21 -06:00
Brannon King
e65e77f9bf
Undo compatibility (#281)
* added test for claimname RPC
2019-05-23 11:46:37 -06:00
lbrynaut
70e7743acc Fix a bug that treats all claims as our own wallet txs. 2019-05-22 15:56:52 -06:00
Brannon King
55f5f2049e allow rest/block/height.json
changes from review, added integration test
2019-05-13 15:04:19 -06:00
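(Illustrative aside, not part of the diff: the by-height REST handler named in this commit would be exercised roughly as below. This assumes the node was started with -rest and that 9245 is the RPC/REST port; both may differ on your setup.)

```sh
# Sketch only: fetch block 100 as JSON through the rest/block/<height>.json endpoint.
curl -s http://127.0.0.1:9245/rest/block/100.json | head -c 200
```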
Brannon King
7f6daef99b code reuse between miner & validator
originally from BvbFan
2019-05-07 16:32:22 -06:00
Jeremy Kauffman
3006f4d99d fix missing spaces in headers 2019-05-07 14:10:51 -06:00
Brannon King
2a1a4ef9c1 pulled in a few minor keepers from the other rebase branch 2019-05-07 14:10:12 -06:00
Brannon King
5ca2e96f5b fixed small claim names coming out as numeric 2019-05-06 16:40:40 -06:00
Brannon King
db55cc6960 fixed slow-running unit tests 2019-05-06 16:29:17 -06:00
lbrynaut
4c9c79e9f5 Rebase lbry on to Bitcoin 0.17.
This contains significant rebase / merge / testing work by Naut
<lbrynaut@protonmail.com>, Anthony Fieroni <bvbfan@abv.bg> and Brannon
King <countprimes@gmail.com>.
2019-05-01 14:50:32 -05:00
177 changed files with 11513 additions and 20840 deletions


@@ -1,8 +1,8 @@
<!-- This issue tracker is only for technical issues related to lbrycrd (the LBRY blockchain).
<!-- This issue tracker is only for technical issues related to Bitcoin Core.
General questions and/or support requests are best directed to the community chat at https://chat.lbry.org.
General bitcoin questions and/or support requests are best directed to the Bitcoin StackExchange at https://bitcoin.stackexchange.com.
For reporting security issues, please email security@lbry.com.
For reporting security issues, please read instructions at https://bitcoincore.org/en/contact/.
If the node is "stuck" during sync or giving "block checksum mismatch" errors, please ensure your hardware is stable by running memtest and observe CPU temperature with a load-test tool such as linpack before creating an issue! -->
@@ -13,7 +13,7 @@ If the node is "stuck" during sync or giving "block checksum mismatch" errors, p
<!--- How reliably can you reproduce the issue, what are the steps to do so? -->
<!-- What version of lbrycrd are you using, where did you get it (website, self-compiled, etc)? -->
<!-- What version of Bitcoin Core are you using, where did you get it (website, self-compiled, etc)? -->
<!-- What type of machine are you observing the error on (OS/CPU and disk type)? -->

2
.gitignore vendored

@@ -119,5 +119,3 @@ contrib/devtools/split-debug.sh
.idea
cmake-build-*/
compile_commands\.json


@@ -1,88 +1,70 @@
language: minimal
filter_secrets: false
cache:
directories:
- ${HOME}/ccache
stages:
- build
- test
jobs:
matrix:
include:
- &build-template
stage: build
name: linux
env: NAME=linux DOCKER_IMAGE=lbry/build_lbrycrd_gcc EXT=
os: linux
dist: xenial
language: minimal
services:
- docker
install:
- mkdir -p ${HOME}/ccache
- docker pull $DOCKER_IMAGE
script:
- echo "build..."
- docker run -v "$(pwd):/lbrycrd" -v "${HOME}/ccache:/ccache" -w /lbrycrd -e CCACHE_DIR=/ccache ${DOCKER_IMAGE} packaging/build_${NAME}_64bit.sh
before_deploy:
- mkdir -p dist
- sudo zip -Xj dist/lbrycrd-${NAME}.zip src/lbrycrdd${EXT} src/lbrycrd-cli${EXT} src/lbrycrd-tx${EXT}
- sudo zip -Xj dist/lbrycrd-${NAME}-test.zip src/test/test_lbrycrd${EXT} src/test/test_lbrycrd_fuzzy${EXT}
- sha256sum dist/lbrycrd-${NAME}.zip
- sha256sum dist/lbrycrd-${NAME}-test.zip
deploy:
- provider: s3
access_key_id: AKIAICKFHNTR5RITASAQ
secret_access_key:
secure: Qfgs8vGnEUvgiZNP2S9zY8qHEzaDOceF/XTv32jRBOISWfTqOTE56DZbOp8WKHPAqn0dx04jKA1NfV9f06sXU1NVbiJ2VYISo6XAk0n3RBJL3/mhNxvut/zM2DHkFPljWTkWEColS0ZyA3m4eUyJvAw/i+mOBT/zDD/oIlS5Uo5l/x3LmF9fYBuei0ucwSQeNOr2wCMIl+pXrIU7B3lEzXh1asayW6A9y7DOqMLnrSQ7TLlSssbnhuhDVpFx0xxX/U2NPraotbGKdo3wwMbms/lluBe60I/LsDNp9/SZXMDXh2YLGUImr97octwpdzIMjF+kU7QAZJzM7grz8PU9+MQh2V5sn6Xsww2x4PdkmHGz/2FMzhrCrlPf5JCaPBH49G+w4/29HYmMrlimOOVx4qXCpQ/XtWWne/d6MF0qqT6JhdPuD9ohmTpxcHRkCe2fxUw6Yn3dj+/+YoCywAcwcBm5jLpAotmWoCmmcnm9rvB7bIuPPZAjJUZViCnyvwY4Tj3Fb+sOuK4b/O5D2+cuS+WgQRkN/RspYlXrXTIh8Efv/yhW5L9WdzG1OExJDw2hX5VTccRRgIKZxZp80U2eYqn2M07+1nU+ShX4kgiSon46k5cfacLgzLKWEyCxWSSTbsYcwRxvDEjtYy4wxAYx8+J3dgQPs/opDXoQTJMjud0=
bucket: build.lbry.io
upload-dir: lbrycrd/${TRAVIS_BRANCH}
acl: public_read
local_dir: dist
skip_cleanup: true
on:
repo: lbryio/lbrycrd
all_branches: true
- <<: *build-template
name: windows
env: NAME=windows DOCKER_IMAGE=lbry/build_lbrycrd EXT=.exe
- <<: *build-template
name: osx
env: NAME=darwin DOCKER_IMAGE=lbry/build_lbrycrd EXT=
before_install:
- mkdir -p ./depends/SDKs && pushd depends/SDKs && curl -C - ${MAC_OS_SDK} | tar --skip-old-files -xJ && popd
- &test-template
stage: test
env: NAME=linux
os: linux
dist: xenial
language: minimal
git:
clone: false
install:
- mkdir -p testrun && cd testrun
- curl http://build.lbry.io/lbrycrd/${TRAVIS_BRANCH}/lbrycrd-${NAME}-test.zip -o temp.zip
- unzip temp.zip
script: TRIEHASH_FUZZER_BLOCKS=1000 ./test_lbrycrd
- <<: *test-template
# os: windows # doesn't support secrets at the moment
os: linux
dist: xenial
env: NAME=windows
services:
- docker
script:
- docker pull lbry/wine
- docker run -v "$(pwd):/test" -e "WINEDEBUG=-all" -e "TRIEHASH_FUZZER_BLOCKS=1000" -it lbry/wine wine "/test/test_lbrycrd.exe"
- <<: *test-template
os: osx
osx_image: xcode8.3
env: NAME=darwin
- os: linux
sudo: required
dist: xenial
language: c
env: TARGET=linux
- os: linux
sudo: required
dist: xenial
language: c
env: TARGET=windows
- os: osx
language: c
osx_image: xcode8.3
env: TARGET=osx
cache:
apt: true
ccache: true
directories:
- build
- depends/built
git:
depth: false
before_install:
- date +%s > "${TRAVIS_BUILD_DIR}/start_time"
- ls -lh build
- du -h -d 2 build
- if [ "$TRAVIS_OS_NAME" == "osx" ]; then brew install ccache; fi
- if [ "$TRAVIS_OS_NAME" == "osx" ]; then export PATH="/usr/local/opt/ccache/libexec:$PATH"; fi
install: true
script:
- mkdir -p "dist/${TRAVIS_BRANCH}"
- if [[ "${TARGET}" == "osx" ]]; then ./reproducible_build.sh -t -o -c -r; fi
- if [[ "${TARGET}" == "linux" ]]; then ./reproducible_build.sh -t -o -c -r; fi
- if [[ "${TARGET}" == "windows" ]]; then ./packaging/build_windows.sh; fi
- if [[ "${TARGET}" == "osx" ]]; then zip -j "dist/${TRAVIS_BRANCH}/lbrycrd-${TARGET}.zip" src/lbrycrdd src/lbrycrd-cli src/lbrycrd-tx; fi
- if [[ "${TARGET}" == "linux" ]]; then zip -j "dist/${TRAVIS_BRANCH}/lbrycrd-${TARGET}.zip" src/lbrycrdd src/lbrycrd-cli src/lbrycrd-tx; fi
- if [[ "${TARGET}" == "windows" ]]; then zip -j "dist/${TRAVIS_BRANCH}/lbrycrd-${TARGET}.zip" src/lbrycrdd.exe src/lbrycrd-cli.exe src/lbrycrd-tx.exe; fi
- if [[ "${TARGET}" == "osx" ]]; then shasum -a 256 dist/${TRAVIS_BRANCH}/lbrycrd-${TARGET}.zip > dist/${TRAVIS_BRANCH}/lbrycrd-${TARGET}-sha256.txt; fi
- if [[ "${TARGET}" == "linux" ]]; then sha256sum -b dist/${TRAVIS_BRANCH}/lbrycrd-${TARGET}.zip > dist/${TRAVIS_BRANCH}/lbrycrd-${TARGET}-sha256.txt; fi
- if [[ "${TARGET}" == "windows" ]]; then sha256sum -b dist/${TRAVIS_BRANCH}/lbrycrd-${TARGET}.zip > dist/${TRAVIS_BRANCH}/lbrycrd-${TARGET}-sha256.txt; fi
- cat dist/${TRAVIS_BRANCH}/lbrycrd-${TARGET}-sha256.txt
before_cache:
- ls -lh build
- du -h -d 2 build
deploy:
- provider: releases
draft: true
file: "dist/${TRAVIS_BRANCH}/lbrycrd-${TARGET}.zip"
name: "${TRAVIS_BRANCH}"
skip_cleanup: true
target_commitish: $TRAVIS_COMMIT
tag_name: $TRAVIS_TAG
on:
tags: true
api_key:
secure: "Ni5WZNR5CefWXpyDUQLMQbQ2LH4Iot+0SqIoM9c4maW06al1M8vu57vWuj2cESsW7JsaBehCE45Cwmo5kWyEjAiZY8sIMmvixkMP/8uPWuLgNmnIbm7U+d0j652DmZshDYtt8EomqV2RhAx/rmBnzGkruLOw9WTp9ZdBN3WbTt/IpZ2gMgVbGWYGOx+uRw7/yGw8m4gShQheto/dycbyyR3XV2WP9wuLmNYkcQ6JumSoQdDWXcvVfbCwylGq2sLDKwhvfTr4iwYyYsWdmhfdEQl0WcIv5C8xgdiY2vzhi2LmLqFbS/fvKNC26Tfo4bOHFG/eOnvqc+yyEB8B/xqW9Gs+A0TUh/3N30vHYZGcpiDU35DwAN5bZ1+s+mr/ZrNzBJ5BgT8io3g0Ko8gykbDvFQVpg7kxFsqA1YCikEpG86lVGk6clTa5guJvAHse+DfnbWO1nfDxYQXW0md861m0txk8RpTC/TVNyH/lL/vsS7LB67EHhRdZY+O1+5sUGMdtvvhMoxJYCwQGpLkh43KRsKynkMUR94w2O9hc8cknXdV3wrndVz00XNdcur6y4D7HTll1tBrF68CA2yKUSY5hsjtPmdlN+DW8ou/rJiKOpQZ/Xzp69AQEheOFfDPItxQRYxWj0dMOk8eszf0wFvi1N7J/hT/IHnuX5ITfa/T4NE="
- provider: s3
access_key_id: AKIAICKFHNTR5RITASAQ
secret_access_key:
secure: Qfgs8vGnEUvgiZNP2S9zY8qHEzaDOceF/XTv32jRBOISWfTqOTE56DZbOp8WKHPAqn0dx04jKA1NfV9f06sXU1NVbiJ2VYISo6XAk0n3RBJL3/mhNxvut/zM2DHkFPljWTkWEColS0ZyA3m4eUyJvAw/i+mOBT/zDD/oIlS5Uo5l/x3LmF9fYBuei0ucwSQeNOr2wCMIl+pXrIU7B3lEzXh1asayW6A9y7DOqMLnrSQ7TLlSssbnhuhDVpFx0xxX/U2NPraotbGKdo3wwMbms/lluBe60I/LsDNp9/SZXMDXh2YLGUImr97octwpdzIMjF+kU7QAZJzM7grz8PU9+MQh2V5sn6Xsww2x4PdkmHGz/2FMzhrCrlPf5JCaPBH49G+w4/29HYmMrlimOOVx4qXCpQ/XtWWne/d6MF0qqT6JhdPuD9ohmTpxcHRkCe2fxUw6Yn3dj+/+YoCywAcwcBm5jLpAotmWoCmmcnm9rvB7bIuPPZAjJUZViCnyvwY4Tj3Fb+sOuK4b/O5D2+cuS+WgQRkN/RspYlXrXTIh8Efv/yhW5L9WdzG1OExJDw2hX5VTccRRgIKZxZp80U2eYqn2M07+1nU+ShX4kgiSon46k5cfacLgzLKWEyCxWSSTbsYcwRxvDEjtYy4wxAYx8+J3dgQPs/opDXoQTJMjud0=
bucket: build.lbry.io
upload-dir: lbrycrd
acl: public_read
local_dir: dist
skip_cleanup: true
on:
repo: lbryio/lbrycrd
all_branches: true

56
CMakeLists.txt Normal file

@@ -0,0 +1,56 @@
cmake_minimum_required(VERSION 3.7)
project(lbrycrd_clion) # Do not use for full compile. This is for CLion syntax checking only.
set (CMAKE_CXX_STANDARD 11)
if(EXISTS "build/boost")
set(BOOST_ROOT "build/boost" CACHE PATH "Boost library path")
set(Boost_NO_SYSTEM_PATHS on CACHE BOOL "Do not search system for Boost")
endif()
find_package(Boost REQUIRED COMPONENTS filesystem thread chrono locale)
file(GLOB sources
src/*.h src/*.cpp
src/wallet/*.h src/wallet/*.cpp
src/support/*.h src/support/*.cpp src/support/allocators/*.h
src/script/*.h src/script/*.cpp
src/index/*.h src/index/*.cpp
src/interfaces/*.h src/interfaces/*.cpp
src/primitives/*.h src/primitives/*.cpp
src/policy/*.h src/policy/*.cpp
src/crypto/*.h src/crypto/*.cpp
src/consensus/*.h src/consensus/*.cpp
src/compat/*.h src/compat/*.cpp
src/rpc/*.h src/rpc/*.cpp
)
list(FILTER sources EXCLUDE REGEX "src/bitcoin*.cpp$")
include_directories(${Boost_INCLUDE_DIRS}
build/bdb/include
build/libevent/include
build/openssl/include
src/support/allocators
src/support
src/rpc
src/policy
src/wallet src/script
src/leveldb/helpers/memenv
src/leveldb/include
src/config
src/crypto
src/compat
src/obj
src/univalue/include
src/secp256k1/include
src/
)
add_compile_definitions(HAVE_CONFIG_H)
add_executable(lbrycrd-cli src/bitcoin-cli.cpp ${sources})
add_executable(lbrycrd-tx src/bitcoin-tx.cpp ${sources})
add_executable(lbrycrdd src/bitcoind.cpp ${sources})
file(GLOB tests src/test/*.cpp src/wallet/test/*.cpp)
add_executable(test_lbrycrd ${tests} ${sources})
target_include_directories(test_lbrycrd PRIVATE src/test)

200
README.md

@@ -1,82 +1,44 @@
# LBRYcrd - The LBRY blockchain
[![Build Status](https://travis-ci.org/lbryio/lbrycrd.svg?branch=master)](https://travis-ci.org/lbryio/lbrycrd)
[![MIT licensed](https://img.shields.io/dub/l/vibe-d.svg?style=flat)](https://github.com/lbryio/lbry-desktop/blob/master/LICENSE)
![alt text](lbrycrdd_daemon_screenshot.png "lbrycrdd daemon screenshot")
LBRYcrd uses a blockchain similar to bitcoin's to implement an index and payment system for content on the LBRY network. It is a fork of [bitcoin core](https://github.com/bitcoin/bitcoin). In addition to the libraries used by bitcoin, LBRYcrd also uses [icu4c](https://github.com/unicode-org/icu/tree/master/icu4c).
Please read the [lbry.tech overview](https://lbry.tech/overview) for a general understanding of the LBRY pieces. From there you could read the [LBRY spec](https://spec.lbry.com/) for specifics on the data in the blockchain.
## Table of Contents
1. [Installation](#installation)
2. [Usage](#usage)
1. [Examples](#examples)
2. [Data directory](#data-directory)
3. [Running from Source](#running-from-source)
1. [Ubuntu with pulled static dependencies](#ubuntu-with-pulled-static-dependencies)
2. [Ubuntu with local shared dependencies](#ubuntu-with-local-shared-dependencies)
3. [MacOS (cross-compiled)](<#macos-(cross-compiled)>)
4. [MacOS with local shared dependencies](#macos-with-local-shared-dependencies)
5. [Windows (cross-compiled)](<#windows-(cross-compiled)>)
6. [Use with CLion](#use-with-clion)
4. [Contributing](#contributing)
- [Testnet](#testnet)
5. [Mailing List](#mailing-list)
6. [License](#license)
7. [Security](#security)
8. [Contact](#contact)
LBRYcrd uses a blockchain similar to bitcoin's to implement an index and payment system for content on the LBRY network. It is a fork of bitcoin core.
## Installation
Latest binaries are available from https://github.com/lbryio/lbrycrd/releases. There is no installation procedure; the CLI binaries will run as-is and will have any uncommon dependencies statically linked into the binary. The QT GUI is not supported. LBRYcrd is distributed as a collection of executable files; traditional installers are not provided.
Latest binaries are available from https://github.com/lbryio/lbrycrd/releases. There is no installation procedure, the binaries will be run as-is.
## Usage
The `lbrycrdd` executable will start a LBRYcrd node and connect you to the LBRYcrd network. Use the `lbrycrd-cli` executable
to interact with lbrycrdd through the command line. Command-line help for both executables are available through
the "--help" flag (e.g. `lbrycrdd --help`). Examples:
to interact with lbrycrdd through the command line. Help pages for both executable are available through
the "--help" flag (e.g. `lbrycrd-cli --help`).
#### Examples
### Example Usage
Run `./lbrycrdd -server -daemon` to start lbrycrdd in the background.
Run `./lbrycrd-cli -getinfo` to check for some basic information about your LBRYcrd node.
Run `./lbrycrd-cli getinfo` to check for some basic information about your LBRYcrd node.
Run `./lbrycrd-cli help` to get a list of all commands that you can run. To get help on specific commands run `./lbrycrd-cli [command_name] help`
Test locally:
### Data directory
```sh
./lbrycrdd -server -regtest -txindex # run this in its own window
./lbrycrd-cli -regtest generate 120 # mine 20 spendable coins
./lbrycrd-cli -regtest claimname my_name deadbeef 1 # hold a name claim with 1 coin
./lbrycrd-cli -regtest generate 1 # get that claim into the block
./lbrycrd-cli -regtest listnameclaims # show owned claims
./lbrycrd-cli -regtest getclaimsforname my_name # show claims under that name
./lbrycrd-cli -regtest stop # kill lbrycrdd
rm -fr ~/.lbrycrd/regtest/ # destroy regtest data
```
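(Illustrative, not part of the diff: the regtest sequence above can be extended with one more lookup; `getvalueforname` is assumed here to return the winning claim for the name claimed earlier.)

```sh
./lbrycrd-cli -regtest getvalueforname my_name   # inspect the winning claim for the name claimed above
```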
Lbrycrdd will use the below default data directories:
For further understanding of a "regtest" setup, see the local stack setup instructions here: https://lbry.tech/resources/regtest-setup
Windows < Vista: C:\Documents and Settings\Username\Application Data\lbrycrd
The CLI help is also browsable online at https://lbry.tech/api/blockchain
Windows >= Vista: C:\Users\Username\AppData\Roaming\lbrycrd
#### Data directory
Mac: ~/Library/Application Support/lbrycrd
Lbrycrdd will use the below default data directories (changeable with -datadir):
```sh
Windows: %APPDATA%\lbrycrd
Mac: ~/Library/Application Support/lbrycrd
Unix: ~/.lbrycrd
```
Unix: ~/.lbrycrd
The data directory contains various things such as your default wallet (wallet.dat), debug logs (debug.log), and blockchain data. You can optionally create a configuration file lbrycrd.conf in the default data directory which will be used by default when running lbrycrdd.
For a list of configuration parameters, run `./lbrycrdd --help`. Below is a sample lbrycrd.conf to enable JSON RPC server on lbrycrdd.
```sh
```
rpcuser=lbry
rpcpassword=xyz123456790
daemon=1
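(Illustrative, not part of the diff: with rpcuser/rpcpassword set as in the sample above, the JSON-RPC server can be queried over HTTP. Port 9245 is assumed here to be the default mainnet RPC port; adjust it to your configuration.)

```sh
# Sketch only: call getblockcount with the credentials from the sample lbrycrd.conf.
curl -s --user lbry:xyz123456790 \
  --data-binary '{"jsonrpc":"1.0","id":"doc","method":"getblockcount","params":[]}' \
  -H 'content-type: text/plain;' http://127.0.0.1:9245/
```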
@@ -86,118 +48,13 @@ txindex=1
## Running from Source
The easiest way to compile is to utilize the Docker image that contains the necessary compilers: lbry/build_lbrycrd. This will allow you to reproduce the build as made on our build servers. In this sample we map a local lbrycrd folder and a local ccache folder inside the image:
```sh
git clone https://github.com/lbryio/lbrycrd.git
cd lbrycrd
docker run -v "$(pwd):/lbrycrd" --rm -v "${HOME}/ccache:/ccache" -w /lbrycrd -e CCACHE_DIR=/ccache lbry/build_lbrycrd packaging/build_linux_64bit.sh
```
Run `./reproducible_build.sh -c -t`. This will build the binaries and put them into the `./src` directory.
Some examples of compiling directly:
#### Ubuntu with pulled static dependencies
```sh
sudo apt install build-essential git libtool autotools-dev automake pkg-config bsdmainutils curl ca-certificates
git clone https://github.com/lbryio/lbrycrd.git
cd lbrycrd
./packaging/build_linux_64bit.sh
./src/test/test_lbrycrd
```
Other Linux distros would be similar. The build shell script is fairly trivial; take a peek at its contents.
#### Ubuntu with local shared dependencies
Note: using untested dependencies may lead to conflicting results.
```sh
sudo add-apt-repository ppa:bitcoin/bitcoin
sudo apt-get update
sudo apt-get install libdb4.8-dev libdb4.8++-dev libicu-dev libssl-dev libevent-dev \
build-essential git libtool autotools-dev automake pkg-config bsdmainutils curl ca-certificates \
libboost-system-dev libboost-filesystem-dev libboost-chrono-dev libboost-test-dev libboost-thread-dev
# optionally include libminiupnpc-dev libzmq3-dev
git clone https://github.com/lbryio/lbrycrd.git
cd lbrycrd
./autogen.sh
./configure --enable-static --disable-shared --with-pic --without-gui CXXFLAGS="-O3 -march=native"
make -j$(nproc)
./src/lbrycrdd -server ...
```
#### MacOS (cross-compiled)
```sh
sudo apt-get install clang llvm git libtool autotools-dev automake pkg-config bsdmainutils curl ca-certificates \
libboost-system-dev libboost-filesystem-dev libboost-chrono-dev libboost-test-dev libboost-thread-dev
git clone https://github.com/lbryio/lbrycrd.git
cd lbrycrd
# download MacOS SDK from your favorite source
mkdir depends/SDKs
tar ... extract SDK to depends/SDKs/MacOSX10.11.sdk
./packaging/build_darwin_64bit.sh
```
Look in packaging/build_darwin_64bit.sh for further understanding.
#### MacOS with local shared dependencies
```sh
brew install boost berkeley-db@4 icu4c libevent
# fix conflict with gawk pulled first:
brew reinstall readline
brew reinstall gawk
git clone https://github.com/lbryio/lbrycrd.git
cd lbrycrd/depends
make NO_QT=1
cd ..
./autogen.sh
CONFIG_SITE=$(pwd)/depends/x86_64-apple-darwin15.6.0/share/config.site ./configure --enable-static --disable-shared --with-pic --without-gui --enable-reduce-exports CXXFLAGS=-O2
make -j$(sysctl -n hw.ncpu)
```
#### Windows (cross-compiled)
Compiling on MS Windows (outside of WSL) is not supported. The Windows build is cross-compiled from Linux like so:
```sh
sudo apt-get install build-essential git libtool autotools-dev automake pkg-config bsdmainutils curl ca-certificates \
g++-mingw-w64-x86-64 mingw-w64-x86-64-dev
update-alternatives --set x86_64-w64-mingw32-g++ /usr/bin/x86_64-w64-mingw32-g++-posix
git clone https://github.com/lbryio/lbrycrd.git
cd lbrycrd
./packaging/build_windows_64bit.sh
```
If you encounter any errors, please check `doc/build-*.md` for further instructions. If you're still stuck, [create an issue](https://github.com/lbryio/lbrycrd/issues/new) with the output of that command, your system info, and any other information you think might be helpful. The scripts in the packaging folder are simple and will grant extra light on the build process as needed.
#### Use with CLion
CLion has not traditionally supported Autotools projects, although some progress on that is now in the works. We do include a cmake build file for compiling lbrycrd. See contrib/cmake. Alas, CLion doesn't support external projects in cmake, so that particular approach is also insufficient. CLion does support "compile_commands.json" projects. Fortunately, this can be easily generated for lbrycrd like so:
```sh
pip install --user compiledb
./autogen.sh && ./configure --enable-static=no --enable-shared --with-pic --without-gui CXXFLAGS="-O0 -g" CFLAGS="-O0 -g" # or whatever normal lbrycrd config
compiledb make -j10
```
Then open the newly generated compile_commands.json file as a project in CLion. Debugging is supported if you compiled with `-g`. To enable that you will need to create a target in CLion by going to File -> Settings -> Build -> Custom Build Targets. Add an empty target with your choice of name. From there you can go to "Edit Configurations", typically found in a drop-down at the top of the editor. Add a Custom Build Application, select your new target, select the compiled file (i.e. test_lbrycrd or lbrycrdd, etc), and then add any necessary command line parameters. Ensure that there is nothing in the "Before launch" section.
If you encounter any errors, please check `doc/build-*.md` for further instructions. If you're still stuck, [create an issue](https://github.com/lbryio/lbrycrd/issues/new) with the output of that command, your system info, and any other information you think might be helpful.
## Contributing
Contributions to this project are welcome, encouraged, and compensated. For more details, see [https://lbry.tech/contribute](https://lbry.tech/contribute)
Contributions to this project are welcome, encouraged, and compensated. For more details, see [lbry.io/faq/contributing](https://lbry.io/faq/contributing)
We follow the same coding guidelines as documented by Bitcoin Core, see [here](/doc/developer-notes.md). To run an automated code formatting check, try:
`git diff -U0 master -- '*.h' '*.cpp' | ./contrib/devtools/clang-format-diff.py -p1`. This will check any commits not on master for proper code formatting.
@@ -210,20 +67,15 @@ regularly to indicate new official, stable release versions.
Testing and code review is the bottleneck for development; we get more pull
requests than we can review and test on short notice. Please be patient and help out by testing
other people's pull requests, and remember this is a security-critical project where any mistake might cost people
lots of money. Developers are strongly encouraged to write [unit tests](/src/test/README.md) for new code and to
submit new unit tests for old code. Unit tests are compiled by default and can be run with `src/test/test_lbrycrd`
lots of money. Developers are strongly encouraged to write [unit tests](/doc/unit-tests.md) for new code, and to
submit new unit tests for old code. Unit tests can be compiled and run
(assuming they weren't disabled in configure) with: `make check`
The Travis CI system makes sure that every pull request is built, and that unit and sanity tests are automatically run. See https://travis-ci.org/lbryio/lbrycrd
The Travis CI system makes sure that every pull request is built, and that unit and sanity tests are automatically run.
### Testnet
Testnet is maintained for testing purposes and can be accessed using the command `./lbrycrdd -testnet`. If you would like to obtain testnet credits, please contact brannon@lbry.com or grin@lbry.com .
It is easy to solo mine on testnet. (It's easy on mainnet too, but much harder to win.) For instructions see [SGMiner](https://github.com/lbryio/sgminer-gm) and [Mining Contributions](https://github.com/lbryio/lbrycrd/tree/master/contrib/mining)
## Mailing List
We maintain a mailing list for notifications of upgrades, security issues, and soft/hard forks. To join, visit [https://lbry.com/forklist](https://lbry.com/forklist).
Testnet is maintained for testing purposes and can be accessed using the command `./lbrycrdd -testnet`. If you would like to obtain testnet credits, please contact brannon@lbry.io or grin@lbry.io.
## License
@@ -231,9 +83,11 @@ This project is MIT licensed. For the full license, see [LICENSE](LICENSE).
## Security
We take security seriously. Please contact [security@lbry.com](mailto:security@lbry.com) regarding any security issues.
Our PGP key is [here](https://lbry.com/faq/pgp-key) if you need it.
We take security seriously. Please contact security@lbry.io regarding any security issues.
Our PGP key is [here](https://keybase.io/lbry/key.asc) if you need it.
## Contact
The primary contact for this project is [@BrannonKing](https://github.com/BrannonKing) (brannon@lbry.com)
The primary contact for this project is [@BrannonKing](https://github.com/BrannonKing) (brannon@lbry.io)


@@ -7,7 +7,7 @@ export LC_ALL=C
set -e
srcdir="$(dirname $0)"
cd "$srcdir"
if [ -z ${LIBTOOLIZE} ] && GLIBTOOLIZE="$(which glibtoolize 2>/dev/null)"; then
if [ -z ${LIBTOOLIZE} ] && GLIBTOOLIZE="`which glibtoolize 2>/dev/null`"; then
LIBTOOLIZE="${GLIBTOOLIZE}"
export LIBTOOLIZE
fi


@@ -33,73 +33,73 @@
AC_DEFUN([AX_BOOST_LOCALE],
[
AC_ARG_WITH([boost-locale],
AS_HELP_STRING([--with-boost-locale@<:@=special-lib@:>@],
AC_ARG_WITH([boost-locale],
AS_HELP_STRING([--with-boost-locale@<:@=special-lib@:>@],
[use the Locale library from boost - it is possible to specify a certain library for the linker
e.g. --with-boost-locale=boost_locale-gcc-mt ]),
[
if test "$withval" = "no"; then
want_boost="no"
want_boost="no"
elif test "$withval" = "yes"; then
want_boost="yes"
ax_boost_user_locale_lib=""
else
want_boost="yes"
ax_boost_user_locale_lib="$withval"
fi
want_boost="yes"
ax_boost_user_locale_lib="$withval"
fi
],
[want_boost="yes"]
)
)
if test "x$want_boost" = "xyes"; then
if test "x$want_boost" = "xyes"; then
AC_REQUIRE([AC_PROG_CC])
AC_REQUIRE([AC_CANONICAL_BUILD])
CPPFLAGS_SAVED="$CPPFLAGS"
CPPFLAGS="$CPPFLAGS $BOOST_CPPFLAGS"
export CPPFLAGS
CPPFLAGS_SAVED="$CPPFLAGS"
CPPFLAGS="$CPPFLAGS $BOOST_CPPFLAGS"
export CPPFLAGS
LDFLAGS_SAVED="$LDFLAGS"
LDFLAGS="$LDFLAGS $BOOST_LDFLAGS"
export LDFLAGS
LDFLAGS_SAVED="$LDFLAGS"
LDFLAGS="$LDFLAGS $BOOST_LDFLAGS"
export LDFLAGS
AC_CACHE_CHECK(whether the Boost::Locale library is available,
ax_cv_boost_locale,
ax_cv_boost_locale,
[AC_LANG_PUSH([C++])
CXXFLAGS_SAVE=$CXXFLAGS
CXXFLAGS_SAVE=$CXXFLAGS
AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[@%:@include <boost/locale.hpp>]],
AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[@%:@include <boost/locale.hpp>]],
[[boost::locale::generator gen;
std::locale::global(gen(""));]])],
ax_cv_boost_locale=yes, ax_cv_boost_locale=no)
CXXFLAGS=$CXXFLAGS_SAVE
CXXFLAGS=$CXXFLAGS_SAVE
AC_LANG_POP([C++])
])
if test "x$ax_cv_boost_locale" = "xyes"; then
AC_SUBST(BOOST_CPPFLAGS)
])
if test "x$ax_cv_boost_locale" = "xyes"; then
AC_SUBST(BOOST_CPPFLAGS)
AC_DEFINE(HAVE_BOOST_LOCALE,,[define if the Boost::Locale library is available])
AC_DEFINE(HAVE_BOOST_LOCALE,,[define if the Boost::Locale library is available])
BOOSTLIBDIR=`echo $BOOST_LDFLAGS | sed -e 's/@<:@^\/@:>@*//'`
LDFLAGS_SAVE=$LDFLAGS
LDFLAGS_SAVE=$LDFLAGS
if test "x$ax_boost_user_locale_lib" = "x"; then
for libextension in `ls $BOOSTLIBDIR/libboost_locale*.so* $BOOSTLIBDIR/libboost_locale*.dylib* $BOOSTLIBDIR/libboost_locale*.a* 2>/dev/null | sed 's,.*/,,' | sed -e 's;^lib\(boost_locale.*\)\.so.*$;\1;' -e 's;^lib\(boost_locale.*\)\.dylib.*$;\1;' -e 's;^lib\(boost_locale.*\)\.a.*$;\1;'` ; do
ax_lib=${libextension}
AC_CHECK_LIB($ax_lib, exit,
AC_CHECK_LIB($ax_lib, exit,
[BOOST_LOCALE_LIB="-l$ax_lib"; AC_SUBST(BOOST_LOCALE_LIB) link_locale="yes"; break],
[link_locale="no"])
done
done
if test "x$link_locale" != "xyes"; then
for libextension in `ls $BOOSTLIBDIR/boost_locale*.dll* $BOOSTLIBDIR/boost_locale*.a* 2>/dev/null | sed 's,.*/,,' | sed -e 's;^\(boost_locale.*\)\.dll.*$;\1;' -e 's;^\(boost_locale.*\)\.a.*$;\1;'` ; do
ax_lib=${libextension}
AC_CHECK_LIB($ax_lib, exit,
AC_CHECK_LIB($ax_lib, exit,
[BOOST_LOCALE_LIB="-l$ax_lib"; AC_SUBST(BOOST_LOCALE_LIB) link_locale="yes"; break],
[link_locale="no"])
done
done
fi
else
for ax_lib in $ax_boost_user_locale_lib boost_locale-$ax_boost_user_locale_lib; do
AC_CHECK_LIB($ax_lib, exit,
AC_CHECK_LIB($ax_lib, exit,
[BOOST_LOCALE_LIB="-l$ax_lib"; AC_SUBST(BOOST_LOCALE_LIB) link_locale="yes"; break],
[link_locale="no"])
done
@@ -108,13 +108,13 @@ AC_DEFUN([AX_BOOST_LOCALE],
if test "x$ax_lib" = "x"; then
AC_MSG_ERROR(Could not find a version of the library!)
fi
if test "x$link_locale" = "xno"; then
AC_MSG_ERROR(Could not link against $ax_lib !)
fi
fi
if test "x$link_locale" = "xno"; then
AC_MSG_ERROR(Could not link against $ax_lib !)
fi
fi
CPPFLAGS="$CPPFLAGS_SAVED"
LDFLAGS="$LDFLAGS_SAVED"
fi
CPPFLAGS="$CPPFLAGS_SAVED"
LDFLAGS="$LDFLAGS_SAVED"
fi
])
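(Illustrative, not part of the diff: as the help string in this macro states, a specific Boost.Locale library can be named at configure time; the exact library suffix depends on how Boost was built.)

```sh
# Sketch only: point the AX_BOOST_LOCALE check at a particular boost_locale variant.
./configure --with-boost-locale=boost_locale-gcc-mt
```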


@@ -1,82 +0,0 @@
#!/usr/bin/env bash
set -o pipefail
function HELP {
echo "Use this command to build lbrycrd."
echo "Dependencies will be pulled and built first."
echo "Use autogen & configure directly to avoid this and use system shared libraries instead."
echo
echo "Optional arguments:"
echo "-jN: number of parallel build jobs"
echo "-q: compile the QT GUI (not working at present)"
echo "-d: force a rebuild of dependencies"
echo "-u: run the unit tests when done"
echo "-g: include debug symbols"
echo "-h: show help"
exit 1
}
REBUILD_DEPENDENCIES=false
RUN_UNIT_TESTS=false
COMPILE_WITH_DEBUG=false
DO_NOT_COMPILE_THE_GUI="NO_QT=1"
WITH_COMPILE_THE_GUI=no
if test -z $PARALLEL_JOBS; then
PARALLEL_JOBS=$(expr $(getconf _NPROCESSORS_ONLN) / 2 + 1)
fi
while getopts j:qdugh FLAG; do
case ${FLAG} in
j)
PARALLEL_JOBS=$OPTARG
;;
q)
DO_NOT_COMPILE_THE_GUI=
WITH_COMPILE_THE_GUI=qt5
;;
g)
COMPILE_WITH_DEBUG=true
;;
u)
RUN_UNIT_TESTS=true
;;
d)
REBUILD_DEPENDENCIES=true
;;
h)
HELP
;;
\?)
HELP
;;
esac
done
echo "Compiling with ${PARALLEL_JOBS} jobs in parallel."
BUILD_FLAGS=(CXXFLAGS="-O3 -march=native")
if test "$COMPILE_WITH_DEBUG" = true; then
BUILD_FLAGS=(--with-debug CXXFLAGS="-Og -g")
fi
cd depends
if test "$REBUILD_DEPENDENCIES" = true; then
make clean
fi
make -j${PARALLEL_JOBS} ${DO_NOT_COMPILE_THE_GUI} V=1
cd ..
LC_ALL=C autoreconf --install
CONFIG_SITE=$(pwd)/depends/$($(pwd)/depends/config.guess)/share/config.site ./configure --enable-reduce-exports \
--enable-static --disable-shared --with-pic --with-gui=${WITH_COMPILE_THE_GUI} "${BUILD_FLAGS[@]}"
if test $? -eq 0; then
make -j${PARALLEL_JOBS}
fi
if test $? -eq 0 && "$RUN_UNIT_TESTS" = true; then
./src/test/test_lbrycrd
fi
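(Illustrative, not part of the diff: combining the flags documented in the HELP function above, a typical invocation would look like the following; the script is assumed to be the reproducible_build.sh referenced in the README.)

```sh
# Sketch only (script name assumed): 8 parallel jobs, debug symbols, run unit tests afterwards.
./reproducible_build.sh -j8 -g -u
```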


@@ -2,10 +2,10 @@ dnl require autoconf 2.60 (AS_ECHO/AS_ECHO_N)
AC_PREREQ([2.60])
define(_CLIENT_VERSION_MAJOR, 0)
define(_CLIENT_VERSION_MINOR, 17)
define(_CLIENT_VERSION_REVISION, 3)
define(_CLIENT_VERSION_BUILD, 3)
define(_CLIENT_VERSION_REVISION, 1)
define(_CLIENT_VERSION_BUILD, 0)
define(_CLIENT_VERSION_IS_RELEASE, true)
define(_COPYRIGHT_YEAR, 2021)
define(_COPYRIGHT_YEAR, 2019)
define(_COPYRIGHT_HOLDERS,[The %s developers])
define(_COPYRIGHT_HOLDERS_SUBSTITUTION,[[LBRYcrd Core]])
AC_INIT([LBRYcrd Core],[_CLIENT_VERSION_MAJOR._CLIENT_VERSION_MINOR._CLIENT_VERSION_REVISION],[https://github.com/lbryio/lbrycrd/issues],[lbrycrd],[https://lbry.com/])
@@ -646,10 +646,9 @@ if test x$ac_cv_sys_large_files != x &&
CPPFLAGS="$CPPFLAGS -D_LARGE_FILES=$ac_cv_sys_large_files"
fi
AS_IF([test x$enable_static != x && test x$LDFLAGS != xdarwin], [
# darwin should be using -stdlib=libc++ (and may need a -static instead)
if test x$enable_static != x; then
AX_CHECK_LINK_FLAG([[-static-libstdc++]], [LDFLAGS="$LDFLAGS -static-libstdc++"])
])
fi
AX_CHECK_LINK_FLAG([[-Wl,--large-address-aware]], [LDFLAGS="$LDFLAGS -Wl,--large-address-aware"])
AX_GCC_FUNC_ATTRIBUTE([visibility])
@@ -703,12 +702,10 @@ if test x$TARGET_OS != xwindows; then
AX_CHECK_COMPILE_FLAG([-fPIC],[PIC_FLAGS="-fPIC"])
fi
# All versions of gcc that we commonly use for building are subject to bug
# https://gcc.gnu.org/bugzilla/show_bug.cgi?id=90348. To work around that, set
# -fstack-reuse=none for all gcc builds. (Only gcc understands this flag)
AX_CHECK_COMPILE_FLAG([-fstack-reuse=none],[HARDENED_CXXFLAGS="$HARDENED_CXXFLAGS -fstack-reuse=none"])
if test x$use_hardening != xno; then
use_hardening=yes
AX_CHECK_COMPILE_FLAG([-Wstack-protector],[HARDENED_CXXFLAGS="$HARDENED_CXXFLAGS -Wstack-protector"])
AX_CHECK_COMPILE_FLAG([-fstack-protector-all],[HARDENED_CXXFLAGS="$HARDENED_CXXFLAGS -fstack-protector-all"])
AX_CHECK_PREPROC_FLAG([-D_FORTIFY_SOURCE=2],[
AX_CHECK_PREPROC_FLAG([-U_FORTIFY_SOURCE],[
@@ -728,10 +725,6 @@ if test x$use_hardening != xno; then
*mingw*)
AC_CHECK_LIB([ssp], [main],, AC_MSG_ERROR(libssp missing))
;;
*)
AX_CHECK_COMPILE_FLAG([-Wstack-protector],[HARDENED_CXXFLAGS="$HARDENED_CXXFLAGS -Wstack-protector"])
AX_CHECK_COMPILE_FLAG([-fstack-protector-all],[HARDENED_CXXFLAGS="$HARDENED_CXXFLAGS -fstack-protector-all"])
;;
esac
fi
@@ -978,7 +971,8 @@ fi
if test x$use_boost = xyes; then
BOOST_LIBS="$BOOST_LDFLAGS $BOOST_SYSTEM_LIB $BOOST_FILESYSTEM_LIB $BOOST_LOCALE_LIB $BOOST_THREAD_LIB $BOOST_CHRONO_LIB"
BOOST_LIBS="$BOOST_LDFLAGS $BOOST_SYSTEM_LIB $BOOST_FILESYSTEM_LIB $BOOST_THREAD_LIB $BOOST_CHRONO_LIB $BOOST_LOCALE_LIB"
dnl If boost (prior to 1.57) was built without c++11, it emulated scoped enums
dnl using c++98 constructs. Unfortunately, this implementation detail leaked into
@@ -1068,25 +1062,19 @@ fi
fi
# the plan for dealing with ICU:
# if the user specifies an ICU prefix, use that one.
# if the user did not specify an ICU prefix but did specify a general prefix use that one.
# otherwise use pkg_config if it's available.
# well, actually, things seem to work fine without this fallback to pkg_config so we'll leave that out for now.
# note: in order to use AC_CHECK_LIB we have to override CPPFLAGS and LDFLAGS
# however, we don't want to keep those overridden after our checks;
# we want to rely on ICU_CPPFLAGS and ICU_LIBS after that
# to further complicate matters there are at least three different naming conventions for ICU libraries
# to simplify things we'll just check one from each convention
AS_IF([test "x${prefix}" != "xNONE" && test "x$ICU_PREFIX" == "xauto"], [
ICU_PREFIX="${prefix}"
AS_IF([test "x$ICU_PREFIX" != xauto], [
ICU_CPPFLAGS="-I$ICU_PREFIX/include"
ICU_LIBS="-L$ICU_PREFIX/lib -licui18n -licuuc -licudata -ldl"
# PKG_CONFIG_PATH="${ICU_PREFIX}/lib/pkgconfig:$PKG_CONFIG_PATH"
# export PKG_CONFIG_PATH
])
AS_IF([test "x$ICU_PREFIX" != "xauto"], [
ICU_CPPFLAGS="$(PKG_CONFIG_SYSROOT_DIR=/ PKG_CONFIG_LIBDIR=$ICU_PREFIX/lib/pkgconfig PKG_CONFIG_PATH=$ICU_PREFIX/share/pkgconfig pkg-config icu-io icu-uc icu-i18n --cflags)"
ICU_LIBS="$(PKG_CONFIG_SYSROOT_DIR=/ PKG_CONFIG_LIBDIR=$ICU_PREFIX/lib/pkgconfig PKG_CONFIG_PATH=$ICU_PREFIX/share/pkgconfig pkg-config icu-io icu-uc icu-i18n --libs)"
])
AC_MSG_NOTICE([Using ICU_CPPFLAGS $ICU_CPPFLAGS])
AC_MSG_NOTICE([Using ICU_LIBS $ICU_LIBS])
CPPFLAGS="$CPPFLAGS $ICU_CPPFLAGS"
ORIG_LDFLAGS=$LDFLAGS
LDFLAGS="$LDFLAGS $ICU_LIBS"
if test x$use_pkgconfig = xyes; then
: dnl
@@ -1115,14 +1103,20 @@ if test x$use_pkgconfig = xyes; then
else
AC_DEFINE_UNQUOTED([ENABLE_ZMQ],[0],[Define to 1 to enable ZMQ functions])
fi
if test "x$ICU_PREFIX" == "xauto"; then
PKG_CHECK_MODULES([ICU], [icu-io, icu-uc, icu-i18n])
fi
]
)
else # probably compiling on Windows or cross-compiling for it:
AC_MSG_NOTICE([Configuring for MinGW])
else # compiling on Window:
AC_MSG_NOTICE([Configuring for Windows])
CPPFLAGS="$CPPFLAGS -Ix86_64-w64-mingw32/include"
LDFLAGS="$LDFLAGS -Lx86_64-w64-mingw32/lib"
AC_CHECK_HEADER([unicode/errorcode.h],,AC_MSG_ERROR(libicu headers missing))
AC_CHECK_LIB([icudata], [main], ICU_LIBS=$ICU_LIBS,
AC_CHECK_LIB([icu18n], [main],ICU_LIBS=$ICU_LIBS, ICU_LIBS="-L$ICU_PREFIX/lib -lsicuio -lsicuin -lsiculx -lsicule -lsicuuc -lsicudt"))
AC_MSG_NOTICE([Using ICU_LIBS=$ICU_LIBS])
LDFLAGS="$ORIG_LDFLAGS $ICU_LIBS"
AC_CHECK_LIB([sicudt], [main], ICU_LIBS=$ICU_LIBS, AC_MSG_ERROR(icu libraries missing))
AC_CHECK_HEADER([openssl/crypto.h],,AC_MSG_ERROR(libcrypto headers missing))
AC_CHECK_LIB([crypto], [main],CRYPTO_LIBS=-lcrypto, AC_MSG_ERROR(libcrypto missing))
@@ -1168,9 +1162,6 @@ else # probably compiling on Windows or cross-compiling for it:
fi
fi
AC_MSG_NOTICE([Using ICU_CPPFLAGS="$ICU_CPPFLAGS"])
AC_MSG_NOTICE([Using ICU_LIBS="$ICU_LIBS"])
save_CXXFLAGS="${CXXFLAGS}"
CXXFLAGS="${CXXFLAGS} ${CRYPTO_CFLAGS} ${SSL_CFLAGS}"
AC_CHECK_DECLS([EVP_MD_CTX_new],,,[AC_INCLUDES_DEFAULT


@@ -1,185 +0,0 @@
cmake_minimum_required(VERSION 3.10)
project(lbrycrd)
set(CMAKE_CXX_STANDARD 11)
include(cmake/CPM.cmake)
include(ExternalProject)
set(OPTIONS "" CACHE STRING "lbrycrdd configure options")
set(CPPFLAGS "" CACHE STRING "lbrycrdd compiler options")
set(LDFLAGS "" CACHE STRING "lbrycrdd linker options")
set(DISABLE_TESTS OFF CACHE BOOL "compilation without tests")
set(DISABLE_WALLET OFF CACHE BOOL "compilation without wallet support")
set(DISABLE_BENCH OFF CACHE BOOL "compilation without bench support")
if(NOT ${CPM_USE_LOCAL_PACKAGES})
set(OPTIONS "${OPTIONS} --enable-static --disable-shared")
else()
list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_LIST_DIR}/cmake")
endif()
set(OPTIONS "--without-gui ${OPTIONS} --with-pic")
if (${DISABLE_TESTS})
set(OPTIONS "${OPTIONS} --disable-tests")
endif()
if (${DISABLE_WALLET})
set(OPTIONS "${OPTIONS} --disable-wallet")
endif()
if (${DISABLE_BENCH})
set(OPTIONS "${OPTIONS} --disable-bench")
endif()
string(TOLOWER ${CMAKE_SYSTEM_NAME}-${CMAKE_SYSTEM_PROCESSOR} ARCH)
CPMAddPackage(
NAME OpenSSL
GITHUB_REPOSITORY openssl/openssl
VERSION 1.0.2
GIT_TAG OpenSSL_1_0_2r
DOWNLOAD_ONLY TRUE
)
if(OpenSSL_ADDED)
ExternalProject_Add(OpenSSL
PREFIX openssl
SOURCE_DIR ${OpenSSL_SOURCE_DIR}
CONFIGURE_COMMAND ${OpenSSL_SOURCE_DIR}/Configure ${ARCH} no-shared no-dso no-engines -fPIC --prefix=<INSTALL_DIR>
BUILD_IN_SOURCE 1
)
set(DEPENDS ${DEPENDS} OpenSSL)
ExternalProject_Get_Property(OpenSSL INSTALL_DIR)
set(LDFLAGS "${LDFLAGS} -L${INSTALL_DIR}/lib")
set(CPPFLAGS "${CPPFLAGS} -I${INSTALL_DIR}/include")
set(OPENSSL_CPPFLAGS "CPPFLAGS=-I${INSTALL_DIR}/include")
set(OPENSSL_LDFLAGS "LDFLAGS=-L${INSTALL_DIR}/lib")
endif(OpenSSL_ADDED)
CPMAddPackage(
NAME Libevent
GITHUB_REPOSITORY libevent/libevent
VERSION 2.1.8
GIT_TAG release-2.1.8-stable
DOWNLOAD_ONLY TRUE
)
if(Libevent_ADDED)
ExternalProject_Add(Libevent
PREFIX libevent
DEPENDS ${DEPENDS}
SOURCE_DIR ${Libevent_SOURCE_DIR}
CONFIGURE_COMMAND ${Libevent_SOURCE_DIR}/autogen.sh
&& ${Libevent_SOURCE_DIR}/configure ${OPENSSL_CPPFLAGS} --enable-cxx --disable-shared --with-pic ${OPENSSL_LDFLAGS} --prefix=<INSTALL_DIR>
BUILD_IN_SOURCE 1
)
set(DEPENDS ${DEPENDS} Libevent)
ExternalProject_Get_Property(Libevent INSTALL_DIR)
set(LDFLAGS "${LDFLAGS} -L${INSTALL_DIR}/lib")
set(CPPFLAGS "${CPPFLAGS} -I${INSTALL_DIR}/include")
endif(Libevent_ADDED)
if(NOT ${DISABLE_WALLET})
CPMAddPackage(
NAME BerkeleyDB
VERSION 4.8.30
URL https://download.oracle.com/berkeley-db/db-4.8.30.NC.zip
URL_HASH SHA256=43ecd76886992ea416fdadc54b7f2b83ef249d9a6964bd07708ccae42d0226ce
DOWNLOAD_ONLY TRUE
)
if(NOT ${BerkeleyDB_VERSION} VERSION_LESS "5.0")
set(OPTIONS "${OPTIONS} --with-incompatible-bdb")
endif()
if(BerkeleyDB_ADDED)
ExternalProject_Add(BerkeleyDB
PREFIX bdb
SOURCE_DIR ${BerkeleyDB_SOURCE_DIR}
PATCH_COMMAND sed -i "s/__atomic_compare_exchange/__atomic_compare_exchange_db/" ${BerkeleyDB_SOURCE_DIR}/dbinc/atomic.h
CONFIGURE_COMMAND ${BerkeleyDB_SOURCE_DIR}/dist/configure --enable-cxx --disable-shared --with-pic --prefix=<INSTALL_DIR>
)
set(DEPENDS ${DEPENDS} BerkeleyDB)
ExternalProject_Get_Property(BerkeleyDB INSTALL_DIR)
set(LDFLAGS "${LDFLAGS} -L${INSTALL_DIR}/lib")
set(CPPFLAGS "${CPPFLAGS} -I${INSTALL_DIR}/include")
endif(BerkeleyDB_ADDED)
endif()
set(BOOST_LIBS chrono,filesystem,system,locale,thread)
string(REPLACE "," ";" BOOST_COMPONENTS ${BOOST_LIBS})
if(NOT ${DISABLE_TESTS})
set(BOOST_LIBS ${BOOST_LIBS},test)
set(BOOST_COMPONENTS ${BOOST_COMPONENTS};unit_test_framework)
endif()
CPMAddPackage(
NAME Boost
GITHUB_REPOSITORY boostorg/boost
VERSION 1.64.0
COMPONENTS ${BOOST_COMPONENTS}
GIT_TAG boost-1.69.0
GIT_SUBMODULES libs/* tools/*
DOWNLOAD_ONLY TRUE
)
# if boost is found system wide we expect to be compiled against icu, so we can skip it
if(Boost_ADDED)
CPMAddPackage(
NAME ICU
GITHUB_REPOSITORY unicode-org/icu
VERSION 63.2
GIT_TAG release-63-2
DOWNLOAD_ONLY TRUE
)
if(ICU_ADDED)
ExternalProject_Add(ICU
PREFIX icu
SOURCE_DIR ${ICU_SOURCE_DIR}
CONFIGURE_COMMAND ${ICU_SOURCE_DIR}/icu4c/source/configure --disable-extras --disable-strict --enable-static
--disable-shared --disable-tests --disable-samples --disable-dyload --disable-layoutex CFLAGS=-fPIC CPPFLAGS=-fPIC --prefix=<INSTALL_DIR>
)
set(DEPENDS ${DEPENDS} ICU)
ExternalProject_Get_Property(ICU INSTALL_DIR)
set(ICU_PATH ${INSTALL_DIR})
set(OPTIONS "${OPTIONS} --with-icu=${ICU_PATH}")
set(LDFLAGS "${LDFLAGS} -L${ICU_PATH}/lib")
set(CPPFLAGS "${CPPFLAGS} -I${ICU_PATH}/include")
endif(ICU_ADDED)
ExternalProject_Add(Boost
PREFIX boost
DEPENDS ${DEPENDS}
SOURCE_DIR ${Boost_SOURCE_DIR}
CONFIGURE_COMMAND ${Boost_SOURCE_DIR}/bootstrap.sh --with-icu=${ICU_PATH} --with-libraries=${BOOST_LIBS} && ${Boost_SOURCE_DIR}/b2 headers
BUILD_COMMAND ${Boost_SOURCE_DIR}/b2 install threading=multi -sNO_BZIP2=1 -sNO_ZLIB=1 link=static linkflags="-L${ICU_PATH}/lib -licuio -licuuc -licudata -licui18n" cxxflags=-fPIC boost.locale.iconv=off boost.locale.posix=off boost.locale.icu=on boost.locale.std=off -sICU_PATH=${ICU_PATH} --prefix=<INSTALL_DIR>
INSTALL_COMMAND ""
BUILD_IN_SOURCE 1
)
set(DEPENDS ${DEPENDS} Boost)
ExternalProject_Get_Property(Boost INSTALL_DIR)
set(OPTIONS "${OPTIONS} --with-boost=${INSTALL_DIR}")
set(LDFLAGS "${LDFLAGS} -L${INSTALL_DIR}/lib")
set(CPPFLAGS "${CPPFLAGS} -I${INSTALL_DIR}/include")
set_property(DIRECTORY PROPERTY ADDITIONAL_MAKE_CLEAN_FILES ${Boost_SOURCE_DIR}/bin.v2)
endif(Boost_ADDED)
set(CPPFLAGS "${CPPFLAGS} -Wno-parentheses -Wno-unused-local-typedefs -Wno-deprecated -Wno-implicit-fallthrough -Wno-unused-parameter")
separate_arguments(OPTIONS)
ExternalProject_Add(lbrycrdd
PREFIX lbrycrdd
DEPENDS ${DEPENDS}
SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/../..
CONFIGURE_COMMAND ${CMAKE_CURRENT_SOURCE_DIR}/../../autogen.sh
&& ${CMAKE_CURRENT_SOURCE_DIR}/../../configure ${OPTIONS} CPPFLAGS=${CPPFLAGS} LDFLAGS=${LDFLAGS} --prefix=<INSTALL_DIR>
BUILD_IN_SOURCE 1
BUILD_ALWAYS 1
)


@@ -1,210 +0,0 @@
# TheLartians/CPM - A simple Git dependency manager
# =================================================
# See https://github.com/TheLartians/CPM for usage and update instructions.
#
# MIT License
# -----------
#[[
Copyright (c) 2019 Lars Melchior
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
]]
cmake_minimum_required(VERSION 3.10 FATAL_ERROR)
set(CURRENT_CPM_VERSION 0.11.1)
if(CPM_DIRECTORY)
if(NOT ${CPM_DIRECTORY} MATCHES ${CMAKE_CURRENT_LIST_DIR})
if (${CPM_VERSION} VERSION_LESS ${CURRENT_CPM_VERSION})
CPM_HANDLE_OLD_VERSION(${CURRENT_CPM_VERSION})
endif()
return()
endif()
endif()
set(CPM_VERSION ${CURRENT_CPM_VERSION} CACHE INTERNAL "")
set(CPM_DIRECTORY ${CMAKE_CURRENT_LIST_DIR} CACHE INTERNAL "")
set(CPM_PACKAGES "" CACHE INTERNAL "")
option(CPM_USE_LOCAL_PACKAGES "Use locally installed packages (find_package)" ON)
option(CPM_LOCAL_PACKAGES_ONLY "Use only locally installed packages" OFF)
include(FetchContent)
include(CMakeParseArguments)
# Initialize logging prefix
if(NOT CPM_INDENT)
set(CPM_INDENT "CPM:")
endif()
# The main workhorse of CPM
function(CPMAddPackage)
set(oneValueArgs
NAME
VERSION
GIT_TAG
DOWNLOAD_ONLY
GITHUB_REPOSITORY
GITLAB_REPOSITORY
)
set(multiValueArgs
OPTIONS
COMPONENTS
)
cmake_parse_arguments(CPM_ARGS "" "${oneValueArgs}" "${multiValueArgs}" ${ARGN})
if(${CPM_USE_LOCAL_PACKAGES} OR ${CPM_LOCAL_PACKAGES_ONLY})
find_package(${CPM_ARGS_NAME} ${CPM_ARGS_VERSION} OPTIONAL_COMPONENTS ${CPM_ARGS_COMPONENTS} QUIET)
if(${CPM_ARGS_NAME}_FOUND)
message(STATUS "CPM: adding local package ${CPM_ARGS_NAME}@${${CPM_ARGS_NAME}_VERSION}")
set(${CPM_ARGS_NAME}_VERSION "${${CPM_ARGS_NAME}_VERSION}" PARENT_SCOPE)
return()
endif()
if(${CPM_LOCAL_PACKAGES_ONLY})
message(SEND_ERROR "CPM: ${CPM_ARGS_NAME} not found via find_package(${CPM_ARGS_NAME} ${CPM_ARGS_VERSION})")
endif()
endif()
if (NOT CPM_ARGS_VERSION)
set(CPM_ARGS_VERSION 0)
endif()
if (NOT CPM_ARGS_GIT_TAG)
set(CPM_ARGS_GIT_TAG v${CPM_ARGS_VERSION})
endif()
list(APPEND CPM_ARGS_UNPARSED_ARGUMENTS GIT_TAG ${CPM_ARGS_GIT_TAG})
if(CPM_ARGS_DOWNLOAD_ONLY)
set(DOWNLOAD_ONLY ${CPM_ARGS_DOWNLOAD_ONLY})
else()
set(DOWNLOAD_ONLY NO)
endif()
if (CPM_ARGS_GITHUB_REPOSITORY)
list(APPEND CPM_ARGS_UNPARSED_ARGUMENTS GIT_REPOSITORY "https://github.com/${CPM_ARGS_GITHUB_REPOSITORY}.git")
endif()
if (CPM_ARGS_GITLAB_REPOSITORY)
list(APPEND CPM_ARGS_UNPARSED_ARGUMENTS GIT_REPOSITORY "https://gitlab.com/${CPM_ARGS_GITLAB_REPOSITORY}.git")
endif()
if (${CPM_ARGS_NAME} IN_LIST CPM_PACKAGES)
CPM_GET_PACKAGE_VERSION(${CPM_ARGS_NAME})
if(${CPM_PACKAGE_VERSION} VERSION_LESS ${CPM_ARGS_VERSION})
message(WARNING "${CPM_INDENT} requires a newer version of ${CPM_ARGS_NAME} (${CPM_ARGS_VERSION}) than currently included (${CPM_PACKAGE_VERSION}).")
endif()
if (CPM_ARGS_OPTIONS)
foreach(OPTION ${CPM_ARGS_OPTIONS})
CPM_PARSE_OPTION(${OPTION})
if(NOT "${${OPTION_KEY}}" STREQUAL ${OPTION_VALUE})
message(WARNING "${CPM_INDENT} ignoring package option for ${CPM_ARGS_NAME}: ${OPTION_KEY} = ${OPTION_VALUE} (${${OPTION_KEY}})")
endif()
endforeach()
endif()
CPM_FETCH_PACKAGE(${CPM_ARGS_NAME} ${DOWNLOAD_ONLY})
CPMGetProperties(${CPM_ARGS_NAME})
set(${CPM_ARGS_NAME}_VERSION ${CPM_ARGS_VERSION} PARENT_SCOPE)
set(${CPM_ARGS_NAME}_SOURCE_DIR "${${CPM_ARGS_NAME}_SOURCE_DIR}" PARENT_SCOPE)
set(${CPM_ARGS_NAME}_BINARY_DIR "${${CPM_ARGS_NAME}_BINARY_DIR}" PARENT_SCOPE)
set(${CPM_ARGS_NAME}_ADDED NO PARENT_SCOPE)
return()
endif()
CPMRegisterPackage(${CPM_ARGS_NAME} ${CPM_ARGS_VERSION})
if (CPM_ARGS_OPTIONS)
foreach(OPTION ${CPM_ARGS_OPTIONS})
CPM_PARSE_OPTION(${OPTION})
set(${OPTION_KEY} ${OPTION_VALUE} CACHE INTERNAL "")
endforeach()
endif()
CPM_DECLARE_PACKAGE(${CPM_ARGS_NAME} ${CPM_ARGS_VERSION} ${CPM_ARGS_GIT_TAG} "${CPM_ARGS_UNPARSED_ARGUMENTS}")
CPM_FETCH_PACKAGE(${CPM_ARGS_NAME} ${DOWNLOAD_ONLY})
CPMGetProperties(${CPM_ARGS_NAME})
set(${CPM_ARGS_NAME}_VERSION ${CPM_ARGS_VERSION} PARENT_SCOPE)
set(${CPM_ARGS_NAME}_SOURCE_DIR "${${CPM_ARGS_NAME}_SOURCE_DIR}" PARENT_SCOPE)
set(${CPM_ARGS_NAME}_BINARY_DIR "${${CPM_ARGS_NAME}_BINARY_DIR}" PARENT_SCOPE)
set(${CPM_ARGS_NAME}_ADDED YES PARENT_SCOPE)
endfunction()
function (CPM_DECLARE_PACKAGE PACKAGE VERSION GIT_TAG)
message(STATUS "${CPM_INDENT} adding package ${PACKAGE}@${VERSION} (${GIT_TAG})")
FetchContent_Declare(
${PACKAGE}
${ARGN}
)
endfunction()
function (CPM_FETCH_PACKAGE PACKAGE DOWNLOAD_ONLY)
set(CPM_OLD_INDENT "${CPM_INDENT}")
set(CPM_INDENT "${CPM_INDENT} ${PACKAGE}:")
if(${DOWNLOAD_ONLY})
if(NOT "${PACKAGE}_POPULATED")
FetchContent_Populate(${PACKAGE})
endif()
else()
FetchContent_MakeAvailable(${PACKAGE})
endif()
set(CPM_INDENT "${CPM_OLD_INDENT}")
endfunction()
function (CPMGetProperties PACKAGE)
FetchContent_GetProperties(${PACKAGE})
string(TOLOWER ${PACKAGE} lpackage)
set(${PACKAGE}_SOURCE_DIR "${${lpackage}_SOURCE_DIR}" PARENT_SCOPE)
set(${PACKAGE}_BINARY_DIR "${${lpackage}_BINARY_DIR}" PARENT_SCOPE)
endfunction()
function(CPMRegisterPackage PACKAGE VERSION)
list(APPEND CPM_PACKAGES ${PACKAGE})
set(CPM_PACKAGES ${CPM_PACKAGES} CACHE INTERNAL "")
set("CPM_PACKAGE_${PACKAGE}_VERSION" ${VERSION} CACHE INTERNAL "")
endfunction()
function(CPM_GET_PACKAGE_VERSION PACKAGE)
set(CPM_PACKAGE_VERSION "${CPM_PACKAGE_${PACKAGE}_VERSION}" PARENT_SCOPE)
endfunction()
function(CPM_PARSE_OPTION OPTION)
string(REGEX MATCH "^[^ ]+" OPTION_KEY ${OPTION})
string(LENGTH ${OPTION_KEY} OPTION_KEY_LENGTH)
math(EXPR OPTION_KEY_LENGTH "${OPTION_KEY_LENGTH}+1")
string(SUBSTRING ${OPTION} "${OPTION_KEY_LENGTH}" "-1" OPTION_VALUE)
set(OPTION_KEY "${OPTION_KEY}" PARENT_SCOPE)
set(OPTION_VALUE "${OPTION_VALUE}" PARENT_SCOPE)
endfunction()
function (CPM_HANDLE_OLD_VERSION NEW_CPM_VERSION)
message(AUTHOR_WARNING "${CPM_INDENT} \
A dependency is using a more recent CPM (${NEW_CPM_VERSION}) than the current project (${CPM_VERSION}). \
It is recommended to upgrade CPM to the most recent version. \
See https://github.com/TheLartians/CPM for more information."
)
endfunction()


@@ -1,171 +0,0 @@
# Author: sum01 <sum01@protonmail.com>
# Git: https://github.com/sum01/FindBerkeleyDB
# Read the README.md for the full info.
# NOTE: If Berkeley DB ever gets a Pkg-config ".pc" file, add pkg_check_modules() here
# Checks if environment paths are empty, set them if they aren't
if(NOT "$ENV{BERKELEYDB_ROOT}" STREQUAL "")
set(_BERKELEYDB_HINTS "$ENV{BERKELEYDB_ROOT}")
elseif(NOT "$ENV{Berkeleydb_ROOT}" STREQUAL "")
set(_BERKELEYDB_HINTS "$ENV{Berkeleydb_ROOT}")
elseif(NOT "$ENV{BERKELEYDBROOT}" STREQUAL "")
set(_BERKELEYDB_HINTS "$ENV{BERKELEYDBROOT}")
else()
# Set just in case, as it's used regardless if it's empty or not
set(_BERKELEYDB_HINTS "")
endif()
# Allow user to pass a path instead of guessing
if(BerkeleyDB_ROOT_DIR)
set(_BERKELEYDB_PATHS "${BerkeleyDB_ROOT_DIR}")
elseif(CMAKE_SYSTEM_NAME MATCHES ".*[wW]indows.*")
# MATCHES is used to work on any devices with windows in the name
# Shameless copy-paste from FindOpenSSL.cmake v3.8
file(TO_CMAKE_PATH "$ENV{PROGRAMFILES}" _programfiles)
list(APPEND _BERKELEYDB_HINTS "${_programfiles}")
# There's actually production release and version numbers in the file path.
# For example, if they're on v6.2.32: C:/Program Files/Oracle/Berkeley DB 12cR1 6.2.32/
# But this still works to find it, so I'm guessing it can accept partial path matches.
foreach(_TARGET_BERKELEYDB_PATH "Oracle/Berkeley DB" "Berkeley DB")
list(APPEND _BERKELEYDB_PATHS
"${_programfiles}/${_TARGET_BERKELEYDB_PATH}"
"C:/Program Files (x86)/${_TARGET_BERKELEYDB_PATH}"
"C:/Program Files/${_TARGET_BERKELEYDB_PATH}"
"C:/${_TARGET_BERKELEYDB_PATH}"
)
endforeach()
else()
# Paths for anything other than Windows
# Cellar/berkeley-db is for macOS from homebrew installation
list(APPEND _BERKELEYDB_PATHS
"/usr"
"/usr/local"
"/usr/local/Cellar/berkeley-db"
"/opt"
"/opt/local"
)
endif()
# Find includes path
find_path(BerkeleyDB_INCLUDE_DIRS
NAMES "db.h"
HINTS ${_BERKELEYDB_HINTS}
PATH_SUFFIXES "include" "includes"
PATHS ${_BERKELEYDB_PATHS}
)
# Checks if the version file exists, save the version file to a var, and fail if there's no version file
if(BerkeleyDB_INCLUDE_DIRS)
# Read the version file db.h into a variable
file(READ "${BerkeleyDB_INCLUDE_DIRS}/db.h" _BERKELEYDB_DB_HEADER)
# Parse the DB version into variables to be used in the lib names
string(REGEX REPLACE ".*DB_VERSION_MAJOR ([0-9]+).*" "\\1" BerkeleyDB_VERSION_MAJOR "${_BERKELEYDB_DB_HEADER}")
string(REGEX REPLACE ".*DB_VERSION_MINOR ([0-9]+).*" "\\1" BerkeleyDB_VERSION_MINOR "${_BERKELEYDB_DB_HEADER}")
# Patch version example on non-crypto installs: x.x.xNC
string(REGEX REPLACE ".*DB_VERSION_PATCH ([0-9]+(NC)?).*" "\\1" BerkeleyDB_VERSION_PATCH "${_BERKELEYDB_DB_HEADER}")
else()
if(BerkeleyDB_FIND_REQUIRED)
# If the find_package(BerkeleyDB REQUIRED) was used, fail since we couldn't find the header
message(FATAL_ERROR "Failed to find Berkeley DB's header file \"db.h\"! Try setting \"BerkeleyDB_ROOT_DIR\" when initiating Cmake.")
elseif(NOT BerkeleyDB_FIND_QUIETLY)
message(WARNING "Failed to find Berkeley DB's header file \"db.h\"! Try setting \"BerkeleyDB_ROOT_DIR\" when initiating Cmake.")
endif()
# Set some garbage values to the versions since we didn't find a file to read
set(BerkeleyDB_VERSION_MAJOR "0")
set(BerkeleyDB_VERSION_MINOR "0")
set(BerkeleyDB_VERSION_PATCH "0")
endif()
# The actual returned/output version variable (the others can be used if needed)
set(BerkeleyDB_VERSION "${BerkeleyDB_VERSION_MAJOR}.${BerkeleyDB_VERSION_MINOR}.${BerkeleyDB_VERSION_PATCH}")
# Finds the target library for berkeley db, since they all follow the same naming conventions
macro(_berkeleydb_get_lib _BERKELEYDB_OUTPUT_VARNAME _TARGET_BERKELEYDB_LIB)
# Different systems sometimes have a version in the lib name...
# and some have a dash or underscore before the versions.
# CMake recommends to put unversioned names before versioned names
find_library(${_BERKELEYDB_OUTPUT_VARNAME}
NAMES
"${_TARGET_BERKELEYDB_LIB}"
"lib${_TARGET_BERKELEYDB_LIB}"
"lib${_TARGET_BERKELEYDB_LIB}${BerkeleyDB_VERSION_MAJOR}.${BerkeleyDB_VERSION_MINOR}"
"lib${_TARGET_BERKELEYDB_LIB}-${BerkeleyDB_VERSION_MAJOR}.${BerkeleyDB_VERSION_MINOR}"
"lib${_TARGET_BERKELEYDB_LIB}_${BerkeleyDB_VERSION_MAJOR}.${BerkeleyDB_VERSION_MINOR}"
"lib${_TARGET_BERKELEYDB_LIB}${BerkeleyDB_VERSION_MAJOR}${BerkeleyDB_VERSION_MINOR}"
"lib${_TARGET_BERKELEYDB_LIB}-${BerkeleyDB_VERSION_MAJOR}${BerkeleyDB_VERSION_MINOR}"
"lib${_TARGET_BERKELEYDB_LIB}_${BerkeleyDB_VERSION_MAJOR}${BerkeleyDB_VERSION_MINOR}"
"lib${_TARGET_BERKELEYDB_LIB}${BerkeleyDB_VERSION_MAJOR}"
"lib${_TARGET_BERKELEYDB_LIB}-${BerkeleyDB_VERSION_MAJOR}"
"lib${_TARGET_BERKELEYDB_LIB}_${BerkeleyDB_VERSION_MAJOR}"
HINTS ${_BERKELEYDB_HINTS}
PATH_SUFFIXES
"lib"
"lib64"
"libs"
"libs64"
PATHS ${_BERKELEYDB_PATHS}
)
# If the library was found, add it to our list of libraries
if(${_BERKELEYDB_OUTPUT_VARNAME})
# If found, append to our libraries variable
# The ${{}} is because the first expands to target the real variable, the second expands the variable's contents...
# and the real variable's contents is the path to the lib. Thus, it appends the path of the lib to BerkeleyDB_LIBRARIES.
list(APPEND BerkeleyDB_LIBRARIES "${${_BERKELEYDB_OUTPUT_VARNAME}}")
endif()
endmacro()
# Find and set the paths of the specific library to the variable
_berkeleydb_get_lib(BerkeleyDB_LIBRARY "db")
# NOTE: Windows doesn't have a db_cxx lib, but instead compiles the cxx code into the "db" lib
_berkeleydb_get_lib(BerkeleyDB_Cxx_LIBRARY "db_cxx")
# NOTE: I don't think Linux/Unix gets an SQL lib
_berkeleydb_get_lib(BerkeleyDB_Sql_LIBRARY "db_sql")
_berkeleydb_get_lib(BerkeleyDB_Stl_LIBRARY "db_stl")
# Needed for find_package_handle_standard_args()
include(FindPackageHandleStandardArgs)
# Fails if required vars aren't found, or if the version doesn't meet specifications.
find_package_handle_standard_args(BerkeleyDB
FOUND_VAR BerkeleyDB_FOUND
REQUIRED_VARS
BerkeleyDB_INCLUDE_DIRS
BerkeleyDB_LIBRARY
BerkeleyDB_LIBRARIES
VERSION_VAR BerkeleyDB_VERSION
)
# Only show the variables in the GUI if they click "advanced".
# Does nothing when using the CLI
mark_as_advanced(FORCE
BerkeleyDB_FOUND
BerkeleyDB_INCLUDE_DIRS
BerkeleyDB_LIBRARIES
BerkeleyDB_VERSION
BerkeleyDB_VERSION_MAJOR
BerkeleyDB_VERSION_MINOR
BerkeleyDB_VERSION_PATCH
BerkeleyDB_LIBRARY
BerkeleyDB_Cxx_LIBRARY
BerkeleyDB_Stl_LIBRARY
BerkeleyDB_Sql_LIBRARY
)
# Create an imported lib for easy linking by external projects
if(BerkeleyDB_FOUND AND BerkeleyDB_LIBRARIES AND NOT TARGET Oracle::BerkeleyDB)
add_library(Oracle::BerkeleyDB UNKNOWN IMPORTED)
set_target_properties(Oracle::BerkeleyDB PROPERTIES
INTERFACE_INCLUDE_DIRECTORIES "${BerkeleyDB_INCLUDE_DIRS}"
IMPORTED_LOCATION "${BerkeleyDB_LIBRARY}"
INTERFACE_LINK_LIBRARIES "${BerkeleyDB_LIBRARIES}"
)
endif()
include(FindPackageMessage)
# A message that tells the user what includes/libs were found, and obeys the QUIET command.
find_package_message(BerkeleyDB
"Found BerkeleyDB libraries: ${BerkeleyDB_LIBRARIES}"
"[${BerkeleyDB_LIBRARIES}[${BerkeleyDB_INCLUDE_DIRS}]]"
)
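(Illustrative, not part of the diff: downstream CMake code would consume this FindBerkeleyDB module roughly as below. The demo target and source file are placeholders, and the module is assumed to sit on CMAKE_MODULE_PATH.)

```cmake
# Sketch only: use the variables and the imported target defined by the module above.
list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_LIST_DIR}/cmake")
find_package(BerkeleyDB 4.8 REQUIRED)
message(STATUS "Found Berkeley DB ${BerkeleyDB_VERSION} in ${BerkeleyDB_INCLUDE_DIRS}")
add_executable(bdb_demo main.cpp)                      # hypothetical target and source
target_link_libraries(bdb_demo PRIVATE Oracle::BerkeleyDB)
```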


@@ -1,97 +0,0 @@
# - Try to find libevent
#.rst
# FindLibevent
# ------------
#
# Find Libevent include directories and libraries. Invoke as::
#
# find_package(Libevent
# [version] [EXACT] # Minimum or exact version
# [REQUIRED] # Fail if Libevent is not found
# [COMPONENT <C>...]) # Libraries to look for
#
# Valid components are one or more of:: libevent core extra pthreads openssl.
# Note that 'libevent' contains both core and extra. You must specify one of
# them for the other components.
#
# This module will define the following variables::
#
# LIBEVENT_FOUND - True if headers and requested libraries were found
# LIBEVENT_INCLUDE_DIRS - Libevent include directories
# LIBEVENT_LIBRARIES - Libevent libraries to be linked
# LIBEVENT_<C>_FOUND - Component <C> was found (<C> is uppercase)
# LIBEVENT_<C>_LIBRARY - Library to be linked for Libevent component <C>.
find_package(PkgConfig QUIET)
pkg_check_modules(PC_LIBEVENT QUIET libevent)
# Look for the Libevent 2.0 or 1.4 headers
find_path(LIBEVENT_INCLUDE_DIR
NAMES
event2/event-config.h
event-config.h
HINTS
${PC_LIBEVENT_INCLUDE_DIRS}
)
if(LIBEVENT_INCLUDE_DIR)
set(_version_regex "^#define[ \t]+_EVENT_VERSION[ \t]+\"([^\"]+)\".*")
if(EXISTS "${LIBEVENT_INCLUDE_DIR}/event2/event-config.h")
# Libevent 2.0
file(STRINGS "${LIBEVENT_INCLUDE_DIR}/event2/event-config.h"
LIBEVENT_VERSION REGEX "${_version_regex}")
if("${LIBEVENT_VERSION}" STREQUAL "")
set(LIBEVENT_VERSION ${PC_LIBEVENT_VERSION})
endif()
else()
# Libevent 1.4
file(STRINGS "${LIBEVENT_INCLUDE_DIR}/event-config.h"
LIBEVENT_VERSION REGEX "${_version_regex}")
endif()
string(REGEX REPLACE "${_version_regex}" "\\1"
LIBEVENT_VERSION "${LIBEVENT_VERSION}")
unset(_version_regex)
endif()
set(_LIBEVENT_REQUIRED_VARS)
foreach(COMPONENT ${Libevent_FIND_COMPONENTS})
set(_LIBEVENT_LIBNAME libevent)
# Note: compare two variables to avoid a CMP0054 policy warning
if(COMPONENT STREQUAL _LIBEVENT_LIBNAME)
set(_LIBEVENT_LIBNAME event)
else()
set(_LIBEVENT_LIBNAME "event_${COMPONENT}")
endif()
string(TOUPPER "${COMPONENT}" COMPONENT_UPPER)
find_library(LIBEVENT_${COMPONENT_UPPER}_LIBRARY
NAMES ${_LIBEVENT_LIBNAME}
HINTS ${PC_LIBEVENT_LIBRARY_DIRS}
)
if(LIBEVENT_${COMPONENT_UPPER}_LIBRARY)
set(Libevent_${COMPONENT}_FOUND 1)
endif()
list(APPEND _LIBEVENT_REQUIRED_VARS LIBEVENT_${COMPONENT_UPPER}_LIBRARY)
endforeach()
unset(_LIBEVENT_LIBNAME)
include(FindPackageHandleStandardArgs)
# handle the QUIETLY and REQUIRED arguments and set LIBEVENT_FOUND to TRUE
# if all listed variables are TRUE and the requested version matches.
find_package_handle_standard_args(Libevent REQUIRED_VARS
${_LIBEVENT_REQUIRED_VARS}
LIBEVENT_INCLUDE_DIR
VERSION_VAR LIBEVENT_VERSION
HANDLE_COMPONENTS)
if(LIBEVENT_FOUND)
set(LIBEVENT_INCLUDE_DIRS ${LIBEVENT_INCLUDE_DIR})
set(LIBEVENT_LIBRARIES)
foreach(COMPONENT ${Libevent_FIND_COMPONENTS})
string(TOUPPER "${COMPONENT}" COMPONENT_UPPER)
list(APPEND LIBEVENT_LIBRARIES ${LIBEVENT_${COMPONENT_UPPER}_LIBRARY})
set(LIBEVENT_${COMPONENT_UPPER}_FOUND ${Libevent_${COMPONENT}_FOUND})
endforeach()
endif()
mark_as_advanced(LIBEVENT_INCLUDE_DIR ${_LIBEVENT_REQUIRED_VARS})
unset(_LIBEVENT_REQUIRED_VARS)

View file

@ -59,13 +59,12 @@ def get_obj_from_dirty_text(full_object: str):
last_name = property_name
elif len(left) > 1:
match = re.match(r'^(\[)?"(?P<name>\w.*?)"(\])?.*', left)
if match is not None:
last_name = match.group('name')
if match.group(1) is not None and match.group(3) is not None:
left = '['
property_refined_type = 'string'
if 'string' not in line:
raise NotImplementedError('Not implemented: ' + line)
last_name = match.group('name')
if match.group(1) is not None and match.group(3) is not None:
left = '['
property_refined_type = 'string'
if 'string' not in line:
raise NotImplementedError('Not implemented: ' + line)
if left.endswith('['):
object_stack.append({'type': 'array', 'items': {'type': property_refined_type}})
@ -97,15 +96,7 @@ def get_obj_from_dirty_text(full_object: str):
ret = obj
if ret is not None:
if i + 1 < len(lines) - 1:
print('WARNING: unparsable data...', file=sys.stderr)
lines = lines[i+1:]
if not lines[0]:
lines = lines[1:]
nret = get_obj_from_dirty_text("\n".join(lines))
if not nret:
nret = get_obj_from_dirty_text("\n".join(lines[1:]))
if nret:
ret.update(nret)
print('Ignoring this data (below the parsed object): ' + "\n".join(lines[i+1:]), file=sys.stderr)
return ret
except Exception as e:
print('Exception: ' + str(e), file=sys.stderr)
@ -122,7 +113,7 @@ def get_type(arg_type: str, full_line: str):
arg_type = arg_type.lower()
if 'array' in arg_type:
return 'array', required, None
if 'numeric' in arg_type or 'number' in arg_type:
if 'numeric' in arg_type:
return 'number', required, None
if 'bool' in arg_type:
return 'boolean', required, None
@ -132,11 +123,6 @@ def get_type(arg_type: str, full_line: str):
properties = get_obj_from_dirty_text(full_line) if full_line is not None else None
return 'object', required, properties
if arg_type.startswith('optional'):
return 'optional', required, None
if arg_type.startswith('json'):
return 'json', required, None
print('Unable to derive type from: ' + arg_type, file=sys.stderr)
return None, False, None
@ -288,4 +274,4 @@ def write_api():
if __name__ == '__main__':
write_api()
write_api()

View file

@ -13,22 +13,17 @@ def get_type(arg_type, full_line):
if arg_type is None:
return 'string'
arg_type = arg_type.lower().split(',')[0].strip()
arg_type = arg_type.lower()
if 'numeric' in arg_type:
return 'number'
if 'bool' in arg_type:
return 'boolean'
if 'array' in arg_type:
return 'array'
if 'string' in arg_type:
return 'string'
if 'object' in arg_type:
return 'object'
supported_types = ['number', 'string', 'object', 'array', 'optional']
if arg_type in supported_types:
return arg_type
print("get_type: WARNING", arg_type, "is not supported type", file=sys.stderr)
return arg_type
raise Exception('Not implemented: ' + arg_type)
def parse_params(args):
@ -39,7 +34,7 @@ def parse_params(args):
continue
arg_parsed = re_argline.fullmatch(line)
if arg_parsed is None:
continue
raise Exception("Unparsable argument: " + line)
arg_name, arg_type, arg_desc = arg_parsed.group('name', 'type', 'desc')
if not arg_type:
raise Exception('Not implemented: ' + arg_type)

File diff suppressed because it is too large.

File diff suppressed because it is too large.

View file

@ -36,13 +36,13 @@ if [ -z "${CODESIGN_ALLOCATE}" ]; then
fi
find ${TEMPDIR} -name "*.sign" | while read i; do
SIZE=$(stat -c %s "${i}")
TARGET_FILE="$(echo "${i}" | sed 's/\.sign$//')"
SIZE=`stat -c %s "${i}"`
TARGET_FILE="`echo "${i}" | sed 's/\.sign$//'`"
echo "Allocating space for the signature of size ${SIZE} in ${TARGET_FILE}"
${CODESIGN_ALLOCATE} -i "${TARGET_FILE}" -a ${ARCH} ${SIZE} -o "${i}.tmp"
OFFSET=$(${PAGESTUFF} "${i}.tmp" -p | tail -2 | grep offset | sed 's/[^0-9]*//g')
OFFSET=`${PAGESTUFF} "${i}.tmp" -p | tail -2 | grep offset | sed 's/[^0-9]*//g'`
if [ -z ${QUIET} ]; then
echo "Attaching signature at offset ${OFFSET}"
fi

View file

@ -27,19 +27,19 @@ ${CODESIGN} -f --file-list ${TEMPLIST} "$@" "${BUNDLE}"
grep -v CodeResources < "${TEMPLIST}" | while read i; do
TARGETFILE="${BUNDLE}/`echo "${i}" | sed "s|.*${BUNDLE}/||"`"
SIZE=$(pagestuff "$i" -p | tail -2 | grep size | sed 's/[^0-9]*//g')
OFFSET=$(pagestuff "$i" -p | tail -2 | grep offset | sed 's/[^0-9]*//g')
SIZE=`pagestuff "$i" -p | tail -2 | grep size | sed 's/[^0-9]*//g'`
OFFSET=`pagestuff "$i" -p | tail -2 | grep offset | sed 's/[^0-9]*//g'`
SIGNFILE="${TEMPDIR}/${OUTROOT}/${TARGETFILE}.sign"
DIRNAME="$(dirname "${SIGNFILE}")"
DIRNAME="`dirname "${SIGNFILE}"`"
mkdir -p "${DIRNAME}"
echo "Adding detached signature for: ${TARGETFILE}. Size: ${SIZE}. Offset: ${OFFSET}"
dd if="$i" of="${SIGNFILE}" bs=1 skip=${OFFSET} count=${SIZE} 2>/dev/null
done
grep CodeResources < "${TEMPLIST}" | while read i; do
TARGETFILE="${BUNDLE}/$(echo "${i}" | sed "s|.*${BUNDLE}/||")"
TARGETFILE="${BUNDLE}/`echo "${i}" | sed "s|.*${BUNDLE}/||"`"
RESOURCE="${TEMPDIR}/${OUTROOT}/${TARGETFILE}"
DIRNAME="$(dirname "${RESOURCE}")"
DIRNAME="`dirname "${RESOURCE}"`"
mkdir -p "${DIRNAME}"
echo "Adding resource for: \"${TARGETFILE}\""
cp "${i}" "${RESOURCE}"

View file

@ -1,57 +0,0 @@
## Stratum Server Instructions
In simple terms, the stratum protocol distributes crypto-mining work to multiple miners. Mining pools typically run a stratum endpoint that the various miners communicate with.
Please refer to other web sources for more information about mining pools or the stratum protocol.
When mining LBC, you can solo-mine directly against an instance of a full node (using the node's wallet), or you can mine as part of a pool.
You can host your own pool or use one of the many hosted LBC pools. See https://miningpoolstats.stream/lbry
This document refers to Yiimp, a derivative of Yaamp, found at https://github.com/tpruvot/yiimp.git.
Please refer to the instructions there as well. Yiimp has supported LBRY mining for several years.
Yiimp consists of two pieces: the web GUI for pool management (written in PHP) and the stratum server (written in C++). The two communicate by polling a MySQL (or MariaDB) database.
The web GUI and configuration of the pooling rewards, fees, etc. are out of scope here.
To help you with running Yiimp, we have created two docker images: one for the DB and one for the Yiimp Stratum Server. (See the subfolders.)
Use of the Docker images is optional; you can refer to other Yiimp and MySQL documentation for running it without Docker.
If you are using your own database instance, you will need to import the Yiimp SQL files to establish the yaamp database.
See https://github.com/tpruvot/yiimp/tree/next/sql .
### Sample Usage Steps:
#### 1. Run the full lbrycrd node:
```
./lbrycrdd -testnet -rpcuser=ruser -rpcpassword=rpswd -deprecatedrpc=validateaddress -deprecatedrpc=accounts -daemon
```
The included deprecated RPCs are required for compatibility with Yiimp.
It will need to be caught up to the current block before it is ready.
Remove `-testnet` for the real deal.
#### 2. Run and initialize the database:
```
docker run -d -e MYSQL_ROOT_PASSWORD=patofpaq -e MYSQL_DATABASE=yaamp --network host --name db lbry/yiimp_db
docker exec -it db mysql -uroot -ppatofpaq
use yaamp;
delete from coins;
insert into coins(name, symbol, symbol2, algo, enable, auto_ready, rpcuser, rpcpasswd, rpchost, rpcport, rpccurl, rpcencoding, hasgetinfo, hassubmitblock, usememorypool, usesegwit, auxpow)
values('Local LBRY Instance', 'LBC', 'LBC', 'lbry', 1, 1, 'ruser', 'rpswd', '127.0.0.1', 19245, 1, 'utf-8', 0, 1, 0, 0, 0);
exit
```
Use port 19245 for testnet, port 9245 for main. Set usesegwit to 1 after the segwit fork is enabled on December 11, 2019.
#### 3. Run the stratum server:
```
docker run --network host -d lbry/yiimp_stratum
```
Alternatively, to get more output or to see how it's invoked directly:
```
docker run --network host -it lbry/yiimp_stratum bash
cat config/lbry.conf
./stratum config/lbry
```
When testing with an ASIC, you may need to change the TCP server address in the lbry.conf file above to an external IP address.
#### 4. Connect sgminer to it:
```
sgminer -k lbry -o stratum+tcp://127.0.0.1:3334/ -D -T -O mn824Su1wX7ip8WcNYzXwwWqvBvkeWGRo6:x
```
The username there is the account to receive payments from the pool. The password is unused. Tested with https://github.com/lbryio/sgminer-gm.
You can use whatever miner you prefer.
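
As a quick sanity check before pointing miners at the pool, it can help to confirm the node answers RPC and the stratum port is listening. A minimal sketch, reusing the testnet credentials and ports from the steps above:
```
# Hypothetical sanity checks, reusing the credentials and ports from the steps above.
./lbrycrd-cli -testnet -rpcuser=ruser -rpcpassword=rpswd getblockcount   # node synced and answering RPC
nc -zv 127.0.0.1 3334                                                    # stratum server accepting connections
```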

View file

@ -1,34 +0,0 @@
FROM mariadb:10.1-bionic
ARG REPOSITORY=https://github.com/tpruvot/yiimp.git
ENV BUILD_DEPS \
ca-certificates \
git
COPY init-db.sh /docker-entrypoint-initdb.d/
RUN apt-get update \
&& apt-get install -y --no-install-recommends ${BUILD_DEPS} \
&& git clone --progress ${REPOSITORY} ~/yiimp \
&& mkdir /tmp/sql \
&& mv ~/yiimp/sql/2016-04-03-yaamp.sql.gz /tmp/sql/0000-00-00-initial.sql.gz \
&& cp ~/yiimp/sql/*.sql /tmp/sql \
&& apt-get purge -y --auto-remove ${BUILD_DEPS} \
&& rm -rf /var/lib/apt/lists/* \
&& rm -rf ~/yiimp
EXPOSE 3306
ARG VCS_REF
ARG BUILD_DATE
LABEL maintainer="blockchain@lbry.com" \
      description="yiimp_db" \
version="1.0" \
org.label-schema.name="yiimp_db" \
org.label-schema.description="Use this to run a compatible MariaDB for yiimp's stratum server" \
org.label-schema.build-date=$BUILD_DATE \
org.label-schema.vcs-ref=$VCS_REF \
org.label-schema.vcs-url="https://github.com/lbryio/lbrycrd" \
org.label-schema.schema-version="1.0.0-rc1" \
org.label-schema.vendor="LBRY" \
org.label-schema.docker.cmd="docker build --build-arg BUILD_DATE=`date -u +"%Y-%m-%dT%H:%M:%SZ"` --build-arg VCS_REF=`git rev-parse --short HEAD` -t lbry/yiimp_db yiimp_db"

View file

@ -1,10 +0,0 @@
#!/bin/bash
for f in /tmp/sql/*; do
case "$f" in
*.sql) echo "$0: running $f"; "${mysql[@]}" --force < "$f"; echo ;;
*.sql.gz) echo "$0: running $f"; gunzip -c "$f" | "${mysql[@]}"; echo ;;
*) echo "$0: ignoring $f" ;;
esac
echo
done
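
Note that this script relies on the `${mysql[@]}` helper that the MariaDB image's entrypoint provides to scripts in `/docker-entrypoint-initdb.d/`, so it only runs in that context. A quick way to verify the import succeeded (a sketch, reusing the container name and root password from the instructions above):
```
# Hypothetical check that the init scripts created the yaamp schema.
docker exec -it db mysql -uroot -ppatofpaq -e 'SHOW TABLES IN yaamp;'
```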

View file

@ -1,54 +0,0 @@
FROM alpine:3.7
ARG REPOSITORY=https://github.com/tpruvot/yiimp.git
ENV BUILD_DEPS \
build-base \
git
ENV RUN_DEPS \
curl-dev \
gmp-dev \
mariadb-dev \
libssh2-dev \
curl
RUN apk update \
&& apk add --no-cache ${BUILD_DEPS} \
&& apk add --no-cache ${RUN_DEPS} \
&& git clone --progress ${REPOSITORY} ~/yiimp \
&& sed -i 's/ulong/uint64_t/g' ~/yiimp/stratum/algos/rainforest.c \
&& find ~/yiimp -name '*akefile' -exec sed -i 's/-march=native//g' {} + \
&& make -C ~/yiimp/stratum/iniparser \
&& make -C ~/yiimp/stratum \
&& mkdir /var/stratum /var/stratum/config \
&& cp ~/yiimp/stratum/run.sh /var/stratum \
&& cp ~/yiimp/stratum/config/run.sh /var/stratum/config \
&& cp ~/yiimp/stratum/stratum /var/stratum \
&& cp ~/yiimp/stratum/config.sample/lbry.conf /var/stratum/config \
&& sed -i 's/yaamp.com/127.0.0.1/g' /var/stratum/config/lbry.conf \
&& sed -i 's/yaampdb/127.0.0.1/g' /var/stratum/config/lbry.conf \
&& rm -rf ~/yiimp \
&& apk del ${BUILD_DEPS} \
&& rm -rf /var/cache/apk/*
RUN apk add --no-cache bash
ARG VCS_REF
ARG BUILD_DATE
LABEL maintainer="blockchain@lbry.com" \
      description="yiimp_stratum" \
version="1.0" \
org.label-schema.name="yiimp_stratum" \
org.label-schema.description="Use this to run yiimp's stratum server in lbry mode" \
org.label-schema.build-date=$BUILD_DATE \
org.label-schema.vcs-ref=$VCS_REF \
org.label-schema.vcs-url="https://github.com/lbryio/lbrycrd" \
org.label-schema.schema-version="1.0.0-rc1" \
org.label-schema.vendor="LBRY" \
org.label-schema.docker.cmd="docker build --build-arg BUILD_DATE=`date -u +"%Y-%m-%dT%H:%M:%SZ"` --build-arg VCS_REF=`git rev-parse --short HEAD` -t lbry/yiimp_stratum yiimp_stratum"
WORKDIR /var/stratum
CMD ["./stratum", "config/lbry"]
EXPOSE 3334

View file

@ -0,0 +1,20 @@
--- src/dbinc/atomic.h 2013-03-12 14:07:22.000000000 -0400
+++ src/dbinc/atomic.h.change 2013-03-12 14:06:35.000000000 -0400
@@ -144,7 +144,7 @@
#define atomic_inc(env, p) __atomic_inc(p)
#define atomic_dec(env, p) __atomic_dec(p)
#define atomic_compare_exchange(env, p, o, n) \
- __atomic_compare_exchange((p), (o), (n))
+ __atomic_compare_exchange_db((p), (o), (n))
static inline int __atomic_inc(db_atomic_t *p)
{
int temp;
@@ -176,7 +176,7 @@
* http://gcc.gnu.org/onlinedocs/gcc-4.1.0/gcc/Atomic-Builtins.html
* which configure could be changed to use.
*/
-static inline int __atomic_compare_exchange(
+static inline int __atomic_compare_exchange_db(
db_atomic_t *p, atomic_value_t oldval, atomic_value_t newval)
{
atomic_value_t was;
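
This rename sidesteps a clash with the `__atomic_compare_exchange` builtin provided by newer GCC/Clang releases, which otherwise breaks the db-4.8.30 build. A sketch of applying it by hand (assuming the hunk above is saved as `bdb-atomic.patch` next to the unpacked source tree; the depends bdb package performs the same rename with sed):
```
# Hypothetical manual application of the patch shown above.
cd db-4.8.30
patch -p1 < ../bdb-atomic.patch
```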

View file

@ -23,7 +23,7 @@ TIMESERVER=http://timestamp.comodoca.com
CERTFILE="win-codesign.cert"
mkdir -p "${OUTSUBDIR}"
basename -a $(ls -1 "${SRCDIR}"/*-unsigned.exe) | while read UNSIGNED; do
basename -a `ls -1 "${SRCDIR}"/*-unsigned.exe` | while read UNSIGNED; do
echo Signing "${UNSIGNED}"
"${OSSLSIGNCODE}" sign -certs "${CERTFILE}" -t "${TIMESERVER}" -in "${SRCDIR}/${UNSIGNED}" -out "${WORKDIR}/${UNSIGNED}" "$@"
"${OSSLSIGNCODE}" extract-signature -pem -in "${WORKDIR}/${UNSIGNED}" -out "${OUTSUBDIR}/${UNSIGNED}.pem" && rm "${WORKDIR}/${UNSIGNED}"

3
depends/.gitignore vendored
View file

@ -7,6 +7,3 @@ x86_64*
i686*
mips*
arm*
aarch64*
riscv32*
riscv64*

View file

@ -1,12 +1,13 @@
.NOTPARALLEL :
SOURCES_PATH ?= $(BASEDIR)/sources
WORK_PATH = $(BASEDIR)/work
BASE_CACHE ?= $(BASEDIR)/built
SDK_PATH ?= $(BASEDIR)/SDKs
NO_QT ?=
NO_WALLET ?=
NO_UPNP ?=
ICU_DIR ?=
ICU_ONLY ?=
FALLBACK_DOWNLOAD_PATH ?= https://bitcoincore.org/depends-sources
BUILD = $(shell ./config.guess)
@ -22,6 +23,7 @@ BUILD_ID_SALT ?= salt
host:=$(BUILD)
ifneq ($(HOST),)
host:=$(HOST)
host_toolchain:=$(HOST)-
endif
ifneq ($(DEBUG),)
@ -30,9 +32,9 @@ else
release_type=release
endif
base_build_dir=$(WORK_PATH)/build
base_staging_dir=$(WORK_PATH)/staging
base_download_dir=$(WORK_PATH)/download
base_build_dir=$(BASEDIR)/work/build
base_staging_dir=$(BASEDIR)/work/staging
base_download_dir=$(BASEDIR)/work/download
canonical_host:=$(shell ./config.sub $(HOST))
build:=$(shell ./config.sub $(BUILD))
@ -73,7 +75,11 @@ include hosts/$(host_os).mk
include hosts/default.mk
include builders/$(build_os).mk
include builders/default.mk
ifeq ($(ICU_ONLY),)
include packages/packages.mk
else
include packages/packages.icu.mk
endif
build_id_string:=$(BUILD_ID_SALT)
build_id_string+=$(shell $(build_CC) --version 2>/dev/null)
@ -89,17 +95,14 @@ $(host_arch)_$(host_os)_id_string+=$(shell $(host_CXX) --version 2>/dev/null)
$(host_arch)_$(host_os)_id_string+=$(shell $(host_RANLIB) --version 2>/dev/null)
$(host_arch)_$(host_os)_id_string+=$(shell $(host_STRIP) --version 2>/dev/null)
qt_packages_$(NO_QT) = $(qt_packages) $(qt_$(host_os)_packages) $(qt_$(host_arch)_$(host_os)_packages)
qt_packages_$(NO_QT) = $(qt_packages) $(qt_$(host_os)_packages)
qt_native_packages_$(NO_QT) = $(qt_native_packages)
wallet_packages_$(NO_WALLET) = $(wallet_packages)
upnp_packages_$(NO_UPNP) = $(upnp_packages)
packages += $($(host_arch)_$(host_os)_packages) $($(host_os)_packages) $(qt_packages_) $(wallet_packages_) $(upnp_packages_)
native_packages += $($(host_arch)_$(host_os)_native_packages) $($(host_os)_native_packages)
ifneq ($(qt_packages_),)
native_packages += $(qt_native_packages)
endif
$(info $$packages is [${packages}])
native_packages += $($(host_arch)_$(host_os)_native_packages) $($(host_os)_native_packages) $(qt_native_packages_)
all_packages = $(packages) $(native_packages)
meta_depends = Makefile funcs.mk builders/default.mk hosts/default.mk hosts/$(host_os).mk builders/$(build_os).mk
@ -122,8 +125,8 @@ $(host_prefix)/.stamp_$(final_build_id): $(native_packages) $(packages)
$(host_prefix)/share/config.site : config.site.in $(host_prefix)/.stamp_$(final_build_id)
$(AT)@mkdir -p $(@D)
$(AT)sed -e 's|@HOST@|$(host)|' \
-e 's|@CC@|$(host_CC)|' \
-e 's|@CXX@|$(host_CXX)|' \
-e 's|@CC@|$(toolchain_path)$(host_CC)|' \
-e 's|@CXX@|$(toolchain_path)$(host_CXX)|' \
-e 's|@AR@|$(toolchain_path)$(host_AR)|' \
-e 's|@RANLIB@|$(toolchain_path)$(host_RANLIB)|' \
-e 's|@NM@|$(toolchain_path)$(host_NM)|' \
@ -131,11 +134,11 @@ $(host_prefix)/share/config.site : config.site.in $(host_prefix)/.stamp_$(final_
-e 's|@build_os@|$(build_os)|' \
-e 's|@host_os@|$(host_os)|' \
-e 's|@CFLAGS@|$(strip $(host_CFLAGS) $(host_$(release_type)_CFLAGS))|' \
-e 's|@CXXFLAGS@|$(strip -pipe $(host_$(release_type)_CXXFLAGS))|' \
-e 's|@CXXFLAGS@|$(strip $(host_CXXFLAGS) $(host_$(release_type)_CXXFLAGS))|' \
-e 's|@CPPFLAGS@|$(strip $(host_CPPFLAGS) $(host_$(release_type)_CPPFLAGS))|' \
-e 's|@LDFLAGS@|$(strip $(host_LDFLAGS) $(host_$(release_type)_LDFLAGS))|' \
-e 's|@allow_host_packages@|$(ALLOW_HOST_PACKAGES)|' \
-e 's|@no_qt@|$(NO_QT)|' \
-e 's|@ICU_DIR@|$(ICU_DIR)|' \
-e 's|@no_wallet@|$(NO_WALLET)|' \
-e 's|@no_upnp@|$(NO_UPNP)|' \
-e 's|@debug@|$(DEBUG)|' \
@ -166,23 +169,17 @@ $(host_prefix)/share/config.site: check-packages
check-packages: check-sources
clean-all: clean
@rm -rf $(SOURCES_PATH) x86_64* i686* mips* arm* aarch64* riscv32* riscv64*
clean:
@rm -rf $(WORK_PATH) $(BASE_CACHE) $(BUILD)
install: check-packages $(host_prefix)/share/config.site
download-one: check-sources $(all_sources)
download-osx:
@$(MAKE) -s HOST=x86_64-apple-darwin14 download-one
@$(MAKE) -s HOST=x86_64-apple-darwin11 download-one
download-linux:
@$(MAKE) -s HOST=x86_64-unknown-linux-gnu download-one
download-win:
@$(MAKE) -s HOST=x86_64-w64-mingw32 download-one
download: download-osx download-linux download-win
.PHONY: install cached clean clean-all download-one download-osx download-linux download-win download check-packages check-sources
.PHONY: install cached download-one download-osx download-linux download-win download check-packages check-sources

View file

@ -22,42 +22,17 @@ Common `host-platform-triplets` for cross compilation are:
- `i686-w64-mingw32` for Win32
- `x86_64-w64-mingw32` for Win64
- `x86_64-apple-darwin14` for macOS
- `arm-linux-gnueabihf` for Linux ARM 32 bit
- `aarch64-linux-gnu` for Linux ARM 64 bit
- `riscv32-linux-gnu` for Linux RISC-V 32 bit
- `riscv64-linux-gnu` for Linux RISC-V 64 bit
- `x86_64-apple-darwin11` for MacOSX
- `arm-linux-gnueabihf` for Linux ARM
No other options are needed, the paths are automatically configured.
Install the required dependencies: Ubuntu & Debian
--------------------------------------------------
For macOS cross compilation:
sudo apt-get install curl librsvg2-bin libtiff-tools bsdmainutils cmake imagemagick libcap-dev libz-dev libbz2-dev python-setuptools
For Win32/Win64 cross compilation:
- see [build-windows.md](../doc/build-windows.md#cross-compilation-for-ubuntu-and-windows-subsystem-for-linux)
For linux (including i386, ARM) cross compilation:
sudo apt-get install curl g++-aarch64-linux-gnu g++-4.8-aarch64-linux-gnu gcc-4.8-aarch64-linux-gnu binutils-aarch64-linux-gnu g++-arm-linux-gnueabihf g++-4.8-arm-linux-gnueabihf gcc-4.8-arm-linux-gnueabihf binutils-arm-linux-gnueabihf g++-4.8-multilib gcc-4.8-multilib binutils-gold bsdmainutils
For linux RISC-V 64-bit cross compilation (there are no packages for 32-bit):
sudo apt-get install curl g++-riscv64-linux-gnu binutils-riscv64-linux-gnu
RISC-V known issue: gcc-7.3.0 and gcc-7.3.1 result in a broken `test_lbrycrd` executable (see https://github.com/bitcoin/bitcoin/pull/13543),
this is apparently fixed in gcc-8.1.0.
Dependency Options:
The following can be set when running make: make FOO=bar
SOURCES_PATH: downloaded sources will be placed here
BASE_CACHE: built packages will be placed here
SDK_PATH: Path where sdk's can be found (used by macOS)
SDK_PATH: Path where sdk's can be found (used by OSX)
FALLBACK_DOWNLOAD_PATH: If a source file can't be fetched, try here before giving up
NO_QT: Don't download/build/cache qt and its dependencies
NO_WALLET: Don't download/build/cache libs needed to enable the wallet
@ -67,12 +42,12 @@ The following can be set when running make: make FOO=bar
BUILD_ID_SALT: Optional salt to use when generating build package ids
If some packages are not built, for example `make NO_WALLET=1`, the appropriate
options will be passed to lbrycrd's configure. In this case, `--disable-wallet`.
options will be passed to bitcoin's configure. In this case, `--disable-wallet`.
Additional targets:
download: run 'make download' to fetch all sources without building them
download-osx: run 'make download-osx' to fetch all sources needed for macOS builds
download-osx: run 'make download-osx' to fetch all sources needed for osx builds
download-win: run 'make download-win' to fetch all sources needed for win builds
download-linux: run 'make download-linux' to fetch all sources needed for linux builds
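
Putting it together, a typical cross-compile run looks something like the sketch below (the host triplet and job count are illustrative; the resulting prefix under `depends/<triplet>` is then handed to configure):
```
# Hypothetical end-to-end usage for a 64-bit Windows cross build.
cd depends
make HOST=x86_64-w64-mingw32 -j4
cd ..
./autogen.sh
./configure --prefix=$PWD/depends/x86_64-w64-mingw32
make -j4
```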

View file

@ -1,17 +1,17 @@
build_darwin_CC:=$(shell xcrun -f clang)
build_darwin_CXX:=$(shell xcrun -f clang++)
build_darwin_AR:=$(shell xcrun -f ar)
build_darwin_RANLIB:=$(shell xcrun -f ranlib)
build_darwin_STRIP:=$(shell xcrun -f strip)
build_darwin_OTOOL:=$(shell xcrun -f otool)
build_darwin_NM:=$(shell xcrun -f nm)
build_darwin_CC: = $(shell xcrun -f clang)
build_darwin_CXX: = $(shell xcrun -f clang++)
build_darwin_AR: = $(shell xcrun -f ar)
build_darwin_RANLIB: = $(shell xcrun -f ranlib)
build_darwin_STRIP: = $(shell xcrun -f strip)
build_darwin_OTOOL: = $(shell xcrun -f otool)
build_darwin_NM: = $(shell xcrun -f nm)
build_darwin_INSTALL_NAME_TOOL:=$(shell xcrun -f install_name_tool)
build_darwin_SHA256SUM=shasum -a 256
build_darwin_DOWNLOAD=curl --location --fail --connect-timeout $(DOWNLOAD_CONNECT_TIMEOUT) --retry $(DOWNLOAD_RETRIES) -o
build_darwin_SHA256SUM = shasum -a 256
build_darwin_DOWNLOAD = curl --location --fail --connect-timeout $(DOWNLOAD_CONNECT_TIMEOUT) --retry $(DOWNLOAD_RETRIES) -o
#darwin host on darwin builder. overrides darwin host preferences.
darwin_CC=$(shell xcrun -f clang) -mmacosx-version-min=$(OSX_MIN_VERSION)
darwin_CXX:=$(shell xcrun -f clang++) -mmacosx-version-min=$(OSX_MIN_VERSION) -stdlib=libc++
darwin_CXX:=$(shell xcrun -f clang++) -mmacosx-version-min=$(OSX_MIN_VERSION)
darwin_AR:=$(shell xcrun -f ar)
darwin_RANLIB:=$(shell xcrun -f ranlib)
darwin_STRIP:=$(shell xcrun -f strip)

611
depends/config.guess vendored

File diff suppressed because it is too large.

View file

@ -1,25 +1,27 @@
depends_prefix="`dirname ${ac_site_file}`/.."
cross_compiling=maybe
host_alias=@HOST@
ac_tool_prefix=${host_alias}-
if test -z $with_boost; then
with_boost=$depends_prefix
with_boost=$prefix
fi
if test -z $with_qt_plugindir; then
with_qt_plugindir=$depends_prefix/plugins
with_qt_plugindir=$prefix/plugins
fi
if test -z $with_qt_translationdir; then
with_qt_translationdir=$depends_prefix/translations
with_qt_translationdir=$prefix/translations
fi
if test -z $with_qt_bindir && test -z "@no_qt@"; then
with_qt_bindir=$depends_prefix/native/bin
if test -z $with_qt_bindir; then
with_qt_bindir=$prefix/native/bin
fi
if test -z $with_protoc_bindir && test -z "@no_qt@"; then
with_protoc_bindir=$depends_prefix/native/bin
if test -z $with_protoc_bindir; then
with_protoc_bindir=$prefix/native/bin
fi
if test -z $with_comparison_tool; then
with_comparison_tool=$prefix/native/share/BitcoindComparisonTool_jar/BitcoindComparisonTool.jar
fi
with_icu_dir=@icu_dir@
if test -z $enable_wallet && test -n "@no_wallet@"; then
enable_wallet=no
@ -40,31 +42,32 @@ fi
if test x@host_os@ = xmingw32; then
if test -z $with_qt_incdir; then
with_qt_incdir=$depends_prefix/include
with_qt_incdir=$prefix/include
fi
if test -z $with_qt_libdir; then
with_qt_libdir=$depends_prefix/lib
with_qt_libdir=$prefix/lib
fi
fi
PATH=$depends_prefix/native/bin:$PATH
PATH=$prefix/native/bin:$PATH
PKG_CONFIG="`which pkg-config` --static"
# These two need to remain exported because pkg-config does not see them
# otherwise. That means they must be unexported at the end of configure.ac to
# avoid ruining the cache. Sigh.
export PKG_CONFIG_PATH=$depends_prefix/share/pkgconfig:$depends_prefix/lib/pkgconfig
if test -z "@allow_host_packages@"; then
export PKGCONFIG_LIBDIR=
fi
CPPFLAGS="-I$depends_prefix/include/ $CPPFLAGS"
LDFLAGS="-L$depends_prefix/lib $LDFLAGS"
export PKG_CONFIG_LIBDIR=$prefix/lib/pkgconfig
export PKG_CONFIG_PATH=$prefix/share/pkgconfig
CPPFLAGS="-I$prefix/include/ $CPPFLAGS"
LDFLAGS="-L$prefix/lib $LDFLAGS"
CC="@CC@"
CXX="@CXX@"
OBJC="${CC}"
PYTHONPATH=$depends_prefix/native/lib/python/dist-packages:$PYTHONPATH
OBJCXX="${CXX}"
CCACHE=$prefix/native/bin/ccache
PYTHONPATH=$prefix/native/lib/python/dist-packages:$PYTHONPATH
if test -n "@AR@"; then
AR=@AR@
@ -97,3 +100,6 @@ fi
if test -n "@LDFLAGS@"; then
LDFLAGS="@LDFLAGS@ $LDFLAGS"
fi
if test -n "@ICU_DIR@"; then
ICU_DIR="@ICU_DIR@"
fi

1669
depends/config.sub vendored

File diff suppressed because it is too large.

View file

@ -7,7 +7,7 @@ In theory, binaries for any target OS/architecture can be created, from a
builder running any OS/architecture. In practice, build-side tools must be
specified when the defaults don't fit, and packages must be amended to work
on new hosts. For now, a build architecture of x86_64 is assumed, either on
Linux or macOS.
Linux or OSX.
### No reliance on timestamps

View file

@ -174,7 +174,7 @@ $($(1)_preprocessed): | $($(1)_dependencies) $($(1)_extracted)
$(AT)echo Preprocessing $(1)...
$(AT)mkdir -p $$(@D) $($(1)_patch_dir)
$(AT)$(foreach patch,$($(1)_patches),cd $(PATCHES_PATH)/$(1); cp $(patch) $($(1)_patch_dir) ;)
$(AT)+cd $$(@D); $(call $(1)_preprocess_cmds, $(1))
$(AT)cd $$(@D); $(call $(1)_preprocess_cmds, $(1))
$(AT)touch $$@
$($(1)_configured): | $($(1)_preprocessed)
$(AT)echo Configuring $(1)...
@ -190,7 +190,7 @@ $($(1)_built): | $($(1)_configured)
$($(1)_staged): | $($(1)_built)
$(AT)echo Staging $(1)...
$(AT)mkdir -p $($(1)_staging_dir)/$(host_prefix)
$(AT)+cd $($(1)_build_dir); $($(1)_stage_env) $(call $(1)_stage_cmds, $(1))
$(AT)cd $($(1)_build_dir); $($(1)_stage_env) $(call $(1)_stage_cmds, $(1))
$(AT)rm -rf $($(1)_extract_dir)
$(AT)touch $$@
$($(1)_postprocessed): | $($(1)_staged)

File diff suppressed because it is too large.

View file

@ -1,19 +1,17 @@
OSX_MIN_VERSION=10.10
OSX_SDK_VERSION=10.11
OSX_MIN_VERSION=10.7
OSX_SDK_VERSION=10.9
OSX_SDK=$(SDK_PATH)/MacOSX$(OSX_SDK_VERSION).sdk
LD64_VERSION=253.9
darwin_CC=clang -target $(host) -mmacosx-version-min=$(OSX_MIN_VERSION) -isysroot $(OSX_SDK) -mlinker-version=$(LD64_VERSION) -B $(host_prefix)/native/bin
darwin_CXX=clang++ -target $(host) -mmacosx-version-min=$(OSX_MIN_VERSION) -isysroot $(OSX_SDK) -mlinker-version=$(LD64_VERSION) -stdlib=libc++ -B $(host_prefix)/native/bin
LD64_VERSION=241.9
darwin_CC=clang -target $(host) -mmacosx-version-min=$(OSX_MIN_VERSION) --sysroot $(OSX_SDK) -mlinker-version=$(LD64_VERSION)
darwin_CXX=clang++ -target $(host) -mmacosx-version-min=$(OSX_MIN_VERSION) --sysroot $(OSX_SDK) -mlinker-version=$(LD64_VERSION)
darwin_CFLAGS=-pipe
darwin_CXXFLAGS=$(darwin_CFLAGS) -std=c++11
darwin_CXXFLAGS=$(darwin_CFLAGS)
darwin_release_CFLAGS=-O2 -g
darwin_release_CFLAGS=-O2
darwin_release_CXXFLAGS=$(darwin_release_CFLAGS)
darwin_debug_CFLAGS=-Og -g
darwin_debug_CXXFLAGS=-O0 -g
darwin_debug_CFLAGS=-O1
darwin_debug_CXXFLAGS=$(darwin_debug_CFLAGS)
darwin_native_toolchain=native_cctools

View file

@ -1,7 +1,3 @@
ifneq ($(host),$(build))
host_toolchain:=$(host)-
endif
default_host_CC = $(host_toolchain)gcc
default_host_CXX = $(host_toolchain)g++
default_host_AR = $(host_toolchain)ar

View file

@ -1,34 +1,31 @@
linux_CFLAGS=-pipe
linux_CXXFLAGS=$(linux_CFLAGS) -std=c++11
linux_CXXFLAGS=$(linux_CFLAGS)
linux_release_CFLAGS=-O3 -g
ifeq (1,$(shell ldd --version | head -1 | awk '{print $$NF < 2.28}'))
linux_release_CFLAGS+= -include $(BASEDIR)/glibc_version_header/force_link_glibc_2.19.h
endif
linux_release_CFLAGS=-O2
linux_release_CXXFLAGS=$(linux_release_CFLAGS)
linux_debug_CFLAGS=-O1 -g
linux_debug_CXXFLAGS=-O0 -g
linux_debug_CFLAGS=-O1
linux_debug_CXXFLAGS=$(linux_debug_CFLAGS)
linux_debug_CPPFLAGS=-D_GLIBCXX_DEBUG -D_GLIBCXX_DEBUG_PEDANTIC
ifeq (86,$(findstring 86,$(build_arch)))
i686_linux_CC=cc -m32
i686_linux_CXX=c++ -m32
i686_linux_CC=gcc -m32
i686_linux_CXX=g++ -m32
i686_linux_AR=ar
i686_linux_RANLIB=ranlib
i686_linux_NM=nm
i686_linux_STRIP=strip
x86_64_linux_CC=cc -m64
x86_64_linux_CXX=c++ -m64
x86_64_linux_CC=gcc -m64
x86_64_linux_CXX=g++ -m64
x86_64_linux_AR=ar
x86_64_linux_RANLIB=ranlib
x86_64_linux_NM=nm
x86_64_linux_STRIP=strip
else
i686_linux_CC=cc -m32
i686_linux_CXX=c++ -m32
x86_64_linux_CC=cc -m64
x86_64_linux_CXX=c++ -m64
i686_linux_CC=$(default_host_CC) -m32
i686_linux_CXX=$(default_host_CXX) -m32
x86_64_linux_CC=$(default_host_CC) -m64
x86_64_linux_CXX=$(default_host_CXX) -m64
endif

View file

@ -1,11 +1,10 @@
mingw32_CFLAGS=-pipe
mingw32_CXXFLAGS=$(mingw32_CFLAGS) -std=c++11
mingw32_CXXFLAGS=$(mingw32_CFLAGS)
mingw32_release_CFLAGS=-O2 -g
mingw32_release_CFLAGS=-O2
mingw32_release_CXXFLAGS=$(mingw32_release_CFLAGS)
mingw32_debug_CFLAGS=-O1 -g
mingw32_debug_CXXFLAGS=-O0 -g
mingw32_debug_CFLAGS=-O1
mingw32_debug_CXXFLAGS=$(mingw32_debug_CFLAGS)
mingw32_debug_CPPFLAGS=-D_GLIBCXX_DEBUG -D_GLIBCXX_DEBUG_PEDANTIC

View file

@ -1,6 +1,6 @@
package=bdb
$(package)_version=4.8.30
$(package)_download_path=https://download.oracle.com/berkeley-db
$(package)_download_path=http://download.oracle.com/berkeley-db
$(package)_file_name=db-$($(package)_version).NC.tar.gz
$(package)_sha256_hash=12edc0df75bf9abd7f82f821795bcee50f42cb2e5f76a6a281b85732798364ef
$(package)_build_subdir=build_unix
@ -9,13 +9,11 @@ define $(package)_set_vars
$(package)_config_opts=--disable-shared --enable-cxx --disable-replication
$(package)_config_opts_mingw32=--enable-mingw
$(package)_config_opts_linux=--with-pic
$(package)_cppflags_mingw32=-DUNICODE -D_UNICODE
endef
define $(package)_preprocess_cmds
sed -i.old 's/__atomic_compare_exchange/__atomic_compare_exchange_db/' dbinc/atomic.h && \
sed -i.old 's/atomic_init/atomic_init_db/' dbinc/atomic.h mp/mp_region.c mp/mp_mvcc.c mp/mp_fget.c mutex/mut_method.c mutex/mut_tas.c && \
cp -f $(BASEDIR)/config.guess $(BASEDIR)/config.sub dist
sed -i.old 's/atomic_init/atomic_init_db/' dbinc/atomic.h mp/mp_region.c mp/mp_mvcc.c mp/mp_fget.c mutex/mut_method.c mutex/mut_tas.c
endef
define $(package)_config_cmds
@ -29,4 +27,3 @@ endef
define $(package)_stage_cmds
$(MAKE) DESTDIR=$($(package)_staging_dir) install_lib install_include
endef
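
When iterating on a single package definition like this one, it is usually quicker to rebuild just that package rather than the whole depends tree. A sketch, assuming the stock depends rules expose a per-package target named after the `package=` variable:
```
# Hypothetical: rebuild only the bdb package (plus its dependencies) for the default host...
make -C depends bdb
# ...or for a specific host triplet:
make -C depends HOST=x86_64-w64-mingw32 bdb
```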

View file

@ -1,19 +1,16 @@
package=boost
$(package)_version=1_69_0
$(package)_download_path=https://boostorg.jfrog.io/artifactory/main/release/1.69.0/source/
$(package)_version=1_59_0
$(package)_download_path=http://sourceforge.net/projects/boost/files/boost/1.59.0
$(package)_file_name=$(package)_$($(package)_version).tar.bz2
$(package)_sha256_hash=8f32d4617390d1c2d16f26a27ab60d97807b35440d45891fa340fc2648b04406
$(package)_dependencies=icu
$(package)_sha256_hash=727a932322d94287b62abb1bd2d41723eec4356a7728909e38adb65ca25241ca
$(package)_dependencies:icu
define $(package)_set_vars
$(package)_config_opts_release=variant=release
$(package)_config_opts_debug=variant=debug
$(package)_config_opts=--layout=tagged --build-type=complete --user-config=user-config.jam
$(package)_config_opts+=threading=multi link=static -sNO_BZIP2=1 -sNO_ZLIB=1
$(package)_config_opts+=boost.locale.iconv=off boost.locale.posix=off boost.locale.icu=on boost.locale.std=off -sICU_PATH="$(host_prefix)"
# The stupid ICU_LINK handling reorders the dependencies alphabetically, thus making it impossible to get the link order correct.
# To work around this we're using the ldflags below but we need ICU_LINK to be non-blank so that we don't get an auto-generated conflict with ldflags.
$(package)_config_opts+=-sICU_LINK="-time"
$(package)_config_opts=--layout=tagged --build-type=complete --user-config=user-config.jam boost.locale.iconv=off boost.locale.posix=off
$(package)_config_opts+=threading=multi link=static -sNO_BZIP2=1 -sNO_ZLIB=1 -sICU_PATH=$(ICU_DIR)
$(package)_config_opts+=-sICU_LINK=-L$(ICU_DIR) -lsicudt -lsicuin -lsicuio -lsicule -lsiculx -lsicutest -lsicutu -lsicuuc
$(package)_config_opts_linux=threadapi=pthread runtime-link=shared
$(package)_config_opts_darwin=--toolset=darwin-4.2.1 runtime-link=shared
$(package)_config_opts_mingw32=binary-format=pe target-os=windows threadapi=win32 runtime-link=static
@ -24,15 +21,17 @@ $(package)_toolset_$(host_os)=gcc
$(package)_archiver_$(host_os)=$($(package)_ar)
$(package)_toolset_darwin=darwin
$(package)_archiver_darwin=$($(package)_libtool)
$(package)_config_libraries=chrono,filesystem,system,locale,thread,test
$(package)_cxxflags=-std=c++11 -fvisibility=hidden -Wno-deprecated
$(package)_config_libraries=chrono,filesystem,program_options,system,locale,regex,thread,test
$(package)_cxxflags=-fvisibility=hidden
$(package)_cxxflags_linux=-fPIC
# The ideal doesn't work because vars are evaluated before any dependency is processed:
# $(package)_ldflags=$$(shell PKG_CONFIG_SYSROOT_DIR=/ PKG_CONFIG_LIBDIR=$(host_prefix)/lib/pkgconfig PKG_CONFIG_PATH=$(host_prefix)/share/pkgconfig pkg-config icu-io icu-uc icu-i18n --libs)
# So we substitute poorly (as these may not actually match all scenarios):
$(package)_ldflags_mingw32=-L$(host_prefix)/lib -lsicuio -lsicuuc -lsicudt
$(package)_ldflags_linux=-L$(host_prefix)/lib -licuio -licuuc -licudata -licui18n
$(package)_ldflags_darwin=-L$(host_prefix)/lib -licuio -licuuc -licudata -licui18n
$(package)_config_env+=BOOST_ICU_ICONV="off"
$(package)_config_env+=BOOST_ICU_POSIX="off"
$(package)_config_env+=ICU_PREFIX=$(ICU_DIR)
$(package)_config_env+=BOOST_ICU_LIBS="-L$(ICU_DIR) -lsicudt -lsicuin -lsicuio -lsicule -lsiculx -lsicutest -lsicutu -lsicuuc"
$(package)_build_env+=BOOST_ICU_ICONV="off"
$(package)_build_env+=BOOST_ICU_POSIX="off"
$(package)_build_env+=ICU_PREFIX=$(ICU_DIR)
$(package)_build_env+=BOOST_ICU_LIBS="-L$(ICU_DIR) -lsicudt -lsicuin -lsicuio -lsicule -lsiculx -lsicutest -lsicutu -lsicuuc"
endef
define $(package)_preprocess_cmds
@ -40,13 +39,12 @@ define $(package)_preprocess_cmds
endef
define $(package)_config_cmds
./bootstrap.sh --with-icu="$(host_prefix)" --with-libraries="$(boost_config_libraries)"
echo "int main() { return 0; }" > ./libs/locale/build/has_icu_test.cpp && echo "int main() { return 0; }" > ./libs/regex/build/has_icu_test.cpp && echo "ICU INSTALL: $(ICU_DIR)" && echo "BOOST CONFIG LIBRARIES: $(boost_config_libraries)" && ./bootstrap.sh --with-icu=$(ICU_DIR) --with-libraries=$(boost_config_libraries)
endef
define $(package)_build_cmds
./b2 -d2 -j`getconf _NPROCESSORS_ONLN` -d1 --reconfigure --prefix=$($(package)_staging_prefix_dir) $($(package)_config_opts) stage
ICU_PATH=$(ICU_DIR) ./b2 link=static cxxflags=-fPIC -d0 -q -j12 --prefix=$($(package)_staging_prefix_dir) $($(package)_config_opts) install
endef
define $(package)_stage_cmds
./b2 -d0 -j`getconf _NPROCESSORS_ONLN` --prefix=$($(package)_staging_prefix_dir) $($(package)_config_opts) install
endef

View file

@ -1,8 +1,8 @@
package=dbus
$(package)_version=1.10.18
$(package)_download_path=https://dbus.freedesktop.org/releases/dbus
$(package)_version=1.8.6
$(package)_download_path=http://dbus.freedesktop.org/releases/dbus
$(package)_file_name=$(package)-$($(package)_version).tar.gz
$(package)_sha256_hash=6049ddd5f3f3e2618f615f1faeda0a115104423a7996b7aa73e2f36e38cc514a
$(package)_sha256_hash=eded83ca007b719f32761e60fd8b9ffd0f5796a4caf455b01b5a5ef740ebd23f
$(package)_dependencies=expat
define $(package)_set_vars

View file

@ -1,8 +1,8 @@
package=expat
$(package)_version=2.2.5
$(package)_download_path=https://github.com/libexpat/libexpat/releases/download/R_2_2_5/
$(package)_file_name=$(package)-$($(package)_version).tar.bz2
$(package)_sha256_hash=d9dc32efba7e74f788fcc4f212a43216fc37cf5f23f4c2339664d473353aedf6
$(package)_version=2.1.0
$(package)_download_path=http://sourceforge.net/projects/expat/files/expat/$($(package)_version)
$(package)_file_name=$(package)-$($(package)_version).tar.gz
$(package)_sha256_hash=823705472f816df21c8f6aa026dd162b280806838bb55b3432b0fb1fcca7eb86
define $(package)_set_vars
$(package)_config_opts=--disable-static

View file

@ -1,8 +1,8 @@
package=fontconfig
$(package)_version=2.12.1
$(package)_version=2.11.1
$(package)_download_path=http://www.freedesktop.org/software/fontconfig/release/
$(package)_file_name=$(package)-$($(package)_version).tar.bz2
$(package)_sha256_hash=b449a3e10c47e1d1c7a6ec6e2016cca73d3bd68fbbd4f0ae5cc6b573f7d6c7f3
$(package)_sha256_hash=dc62447533bca844463a3c3fd4083b57c90f18a70506e7a9f4936b5a1e516a99
$(package)_dependencies=freetype expat
define $(package)_set_vars
@ -13,13 +13,7 @@ define $(package)_config_cmds
$($(package)_autoconf)
endef
# 2.12.1 uses CHAR_WIDTH which is reserved and clashes with some glibc versions, but newer versions of fontconfig
# have broken makefiles which needlessly attempt to re-generate headers with gperf.
# Instead, change all uses of CHAR_WIDTH, and disable the rule that forces header re-generation.
# This can be removed once the upstream build is fixed.
define $(package)_build_cmds
sed -i 's/CHAR_WIDTH/CHARWIDTH/g' fontconfig/fontconfig.h src/fcobjshash.gperf src/fcobjs.h src/fcobjshash.h && \
sed -i 's/fcobjshash.h: fcobjshash.gperf/fcobjshash.h:/' src/Makefile && \
$(MAKE)
endef

View file

@ -1,8 +1,8 @@
package=freetype
$(package)_version=2.7.1
$(package)_download_path=http://download.savannah.gnu.org/releases/$(package)
$(package)_version=2.5.3
$(package)_download_path=http://downloads.sourceforge.net/$(package)
$(package)_file_name=$(package)-$($(package)_version).tar.bz2
$(package)_sha256_hash=3a3bb2c4e15ffb433f2032f50a5b5a92558206822e22bfe8cbe339af4aa82f88
$(package)_sha256_hash=c0848b29d52ef3ca27ad92e08351f023c5e24ce8cea7d8fe69fc96358e65f75e
define $(package)_set_vars
$(package)_config_opts=--without-zlib --without-png --disable-static

View file

@ -1,38 +1,18 @@
package=icu
$(package)_version=63_2
$(package)_download_path=https://github.com/unicode-org/icu/releases/download/release-63-2/
$(package)_version=57_1
$(package)_download_path=http://download.icu-project.org/files/icu4c/57.1
$(package)_file_name=$(package)4c-$($(package)_version)-src.tgz
$(package)_sha256_hash=4671e985b5c11252bff3c2374ab84fd73c609f2603bb6eb23b8b154c69ea4215
$(package)_sha256_hash=ff8c67cb65949b1e7808f2359f2b80f722697048e90e7cfc382ec1fe229e9581
$(package)_build_subdir=source
$(package)_standard_opts=--disable-extras --disable-strict --enable-static --disable-shared --disable-tests --disable-samples --disable-dyload --disable-layoutex
define $(package)_set_vars
$(package)_config_opts=$($(package)_standard_opts)
$(package)_config_opts_debug=--enable-debug --disable-release
$(package)_config_opts=--enable-debug --disable-release --host=x86_64-w64-mingw32 --with-cross-build=/tmp/icu_staging/icu/source --enable-extras=no --enable-strict=no --enable-static --enable-shared=no --enable-tests=no --enable-samples=no --enable-dyload=no
$(package)_config_opts_release=--disable-debug --enable-release
$(package)_config_opts_mingw32=--with-cross-build="$($(package)_extract_dir)/build"
$(package)_config_opts_darwin=--with-cross-build="$($(package)_extract_dir)/build" LIBTOOL="$($(package)_libtool)"
$(package)_archiver_darwin=$($(package)_libtool)
$(package)_cflags_linux=-fPIC
$(package)_cppflags_linux=-fPIC
$(package)_cxxflags=-std=c++11
endef
define $(package)_preprocess_cmds
PKG_CONFIG_SYSROOT_DIR=/ \
PKG_CONFIG_LIBDIR=$(host_prefix)/lib/pkgconfig \
PKG_CONFIG_PATH=$(host_prefix)/share/pkgconfig \
mkdir -p build && cd build && \
../source/runConfigureICU Linux $($(package)_standard_opts) CXXFLAGS=-std=c++11 && \
$(MAKE) && cd ..
$(package)_config_opts_mingw32=--host=x86_64-w64-mingw32
$(package)_config_opts_linux=--with-pic
endef
define $(package)_config_cmds
PKG_CONFIG_SYSROOT_DIR=/ \
PKG_CONFIG_LIBDIR=$(host_prefix)/lib/pkgconfig \
PKG_CONFIG_PATH=$(host_prefix)/share/pkgconfig \
sed -i.old 's|^GEN_DEPS.c=.*|& $($(package)_cflags)|' config/mh-mingw* && \
sed -i.old 's|^GEN_DEPS.cc=.*|& $($(package)_cxxflags)|' config/mh-mingw* && \
$($(package)_autoconf)
endef
@ -41,5 +21,5 @@ define $(package)_build_cmds
endef
define $(package)_stage_cmds
$(MAKE) DESTDIR=$($(package)_staging_dir) install
$(MAKE) DESTDIR=/tmp/icu_install install
endef

View file

@ -1,15 +1,16 @@
package=libevent
$(package)_version=2.1.8-stable
$(package)_download_path=https://github.com/libevent/libevent/archive/
$(package)_file_name=release-$($(package)_version).tar.gz
$(package)_sha256_hash=316ddb401745ac5d222d7c529ef1eada12f58f6376a66c1118eee803cb70f83d
$(package)_version=2.0.22
$(package)_download_path=https://github.com/libevent/libevent/releases/download/release-2.0.22-stable
$(package)_file_name=$(package)-$($(package)_version)-stable.tar.gz
$(package)_sha256_hash=71c2c49f0adadacfdbe6332a372c38cf9c8b7895bb73dabeaa53cdcc1d4e1fa3
$(package)_patches=reuseaddr.patch
define $(package)_preprocess_cmds
./autogen.sh
patch -p1 < $($(package)_patch_dir)/reuseaddr.patch
endef
define $(package)_set_vars
$(package)_config_opts=--disable-shared --disable-openssl --disable-libevent-regress --disable-samples
$(package)_config_opts=--disable-shared --disable-openssl --disable-libevent-regress
$(package)_config_opts_release=--disable-debug-mode
$(package)_config_opts_linux=--with-pic
endef
@ -27,5 +28,4 @@ define $(package)_stage_cmds
endef
define $(package)_postprocess_cmds
rm lib/*.la
endef

View file

@ -1,12 +1,12 @@
package=miniupnpc
$(package)_version=2.0.20180203
$(package)_download_path=https://miniupnp.tuxfamily.org/files/
$(package)_version=1.9.20160209
$(package)_download_path=http://miniupnp.free.fr/files
$(package)_file_name=$(package)-$($(package)_version).tar.gz
$(package)_sha256_hash=90dda8c7563ca6cd4a83e23b3c66dbbea89603a1675bfdb852897c2c9cc220b7
$(package)_sha256_hash=572171eacc1d72537ce47b6f4571260757ab7bcfdaf54c3a55c7f88594d94b6f
define $(package)_set_vars
$(package)_build_opts=CC="$($(package)_cc)"
$(package)_build_opts_darwin=LIBTOOL="$($(package)_libtool)"
$(package)_build_opts_darwin=OS=Darwin LIBTOOL="$($(package)_libtool)"
$(package)_build_opts_mingw32=-f Makefile.mingw
$(package)_build_env+=CFLAGS="$($(package)_cflags) $($(package)_cppflags)" AR="$($(package)_ar)"
endef

View file

@ -1,8 +1,8 @@
package=native_biplist
$(package)_version=1.0.3
$(package)_download_path=https://bitbucket.org/wooster/biplist/downloads
$(package)_version=0.9
$(package)_download_path=https://pypi.python.org/packages/source/b/biplist
$(package)_file_name=biplist-$($(package)_version).tar.gz
$(package)_sha256_hash=4c0549764c5fe50b28042ec21aa2e14fe1a2224e239a1dae77d9e7f3932aa4c6
$(package)_sha256_hash=b57cadfd26e4754efdf89e9e37de87885f9b5c847b2615688ca04adfaf6ca604
$(package)_install_libdir=$(build_prefix)/lib/python/dist-packages
define $(package)_build_cmds

View file

@ -0,0 +1,25 @@
package=native_ccache
$(package)_version=3.2.4
$(package)_download_path=http://samba.org/ftp/ccache
$(package)_file_name=ccache-$($(package)_version).tar.bz2
$(package)_sha256_hash=ffeb967edb549e67da0bd5f44f729a2022de9fdde65dfd80d2a7204d7f75332e
define $(package)_set_vars
$(package)_config_opts=
endef
define $(package)_config_cmds
$($(package)_autoconf)
endef
define $(package)_build_cmds
$(MAKE)
endef
define $(package)_stage_cmds
$(MAKE) DESTDIR=$($(package)_staging_dir) install
endef
define $(package)_postprocess_cmds
rm -rf lib include
endef

View file

@ -1,20 +1,43 @@
package=native_cctools
$(package)_version=807d6fd1be5d2224872e381870c0a75387fe05e6
$(package)_version=ee31ae567931c426136c94aad457c7b51d844beb
$(package)_download_path=https://github.com/theuni/cctools-port/archive
$(package)_file_name=$($(package)_version).tar.gz
$(package)_sha256_hash=a09c9ba4684670a0375e42d9d67e7f12c1f62581a27f28f7c825d6d7032ccc6a
$(package)_sha256_hash=ef107e6ab1b3994cb22e14f4f5c59ea0c0b5a988e6b21d42ed9616b018bbcbf9
$(package)_build_subdir=cctools
$(package)_clang_version=3.3
$(package)_clang_download_path=http://llvm.org/releases/$($(package)_clang_version)
$(package)_clang_download_file=clang+llvm-$($(package)_clang_version)-amd64-Ubuntu-12.04.2.tar.gz
$(package)_clang_file_name=clang-llvm-$($(package)_clang_version)-amd64-Ubuntu-12.04.2.tar.gz
$(package)_clang_sha256_hash=60d8f69f032d62ef61bf527857ebb933741ec3352d4d328c5516aa520662dab7
$(package)_extra_sources=$($(package)_clang_file_name)
define $(package)_fetch_cmds
$(call fetch_file,$(package),$($(package)_download_path),$($(package)_download_file),$($(package)_file_name),$($(package)_sha256_hash)) && \
$(call fetch_file,$(package),$($(package)_clang_download_path),$($(package)_clang_download_file),$($(package)_clang_file_name),$($(package)_clang_sha256_hash))
endef
define $(package)_extract_cmds
mkdir -p $($(package)_extract_dir) && \
echo "$($(package)_sha256_hash) $($(package)_source)" > $($(package)_extract_dir)/.$($(package)_file_name).hash && \
echo "$($(package)_clang_sha256_hash) $($(package)_source_dir)/$($(package)_clang_file_name)" >> $($(package)_extract_dir)/.$($(package)_file_name).hash && \
$(build_SHA256SUM) -c $($(package)_extract_dir)/.$($(package)_file_name).hash && \
mkdir -p toolchain/bin toolchain/lib/clang/3.5/include && \
tar --strip-components=1 -C toolchain -xf $($(package)_source_dir)/$($(package)_clang_file_name) && \
echo "#!/bin/sh" > toolchain/bin/$(host)-dsymutil && \
echo "exit 0" >> toolchain/bin/$(host)-dsymutil && \
chmod +x toolchain/bin/$(host)-dsymutil && \
tar --strip-components=1 -xf $($(package)_source)
endef
define $(package)_set_vars
$(package)_config_opts=--target=$(host) --disable-lto-support --prefix=/
$(package)_ldflags+=-Wl,-rpath=\\$$$$$$$$\$$$$$$$$ORIGIN/../lib
$(package)_cc=cc
$(package)_cxx=c++
$(package)_config_opts=--target=$(host) --disable-libuuid
$(package)_ldflags+=-Wl,-rpath=\\$$$$$$$$\$$$$$$$$ORIGIN/../lib
$(package)_cc=$($(package)_extract_dir)/toolchain/bin/clang
$(package)_cxx=$($(package)_extract_dir)/toolchain/bin/clang++
endef
define $(package)_preprocess_cmds
cd $($(package)_build_subdir); ./autogen.sh && \
sed -i.old "/define HAVE_PTHREADS/d" ld64/src/ld/InputFiles.h
cd $($(package)_build_subdir); ./autogen.sh
endef
define $(package)_config_cmds
@ -26,6 +49,14 @@ define $(package)_build_cmds
endef
define $(package)_stage_cmds
mkdir -p $($(package)_staging_prefix_dir) && \
$(MAKE) DESTDIR=$($(package)_staging_prefix_dir) install
$(MAKE) DESTDIR=$($(package)_staging_dir) install && \
cd $($(package)_extract_dir)/toolchain && \
mkdir -p $($(package)_staging_prefix_dir)/lib/clang/$($(package)_clang_version)/include && \
mkdir -p $($(package)_staging_prefix_dir)/bin $($(package)_staging_prefix_dir)/include && \
cp -P bin/clang bin/clang++ $($(package)_staging_prefix_dir)/bin/ &&\
cp lib/libLTO.so $($(package)_staging_prefix_dir)/lib/ && \
cp -rf lib/clang/$($(package)_clang_version)/include/* $($(package)_staging_prefix_dir)/lib/clang/$($(package)_clang_version)/include/ && \
cp bin/$(host)-dsymutil $($(package)_staging_prefix_dir)/bin && \
if `test -d include/c++/`; then cp -rf include/c++/ $($(package)_staging_prefix_dir)/include/; fi && \
if `test -d lib/c++/`; then cp -rf lib/c++/ $($(package)_staging_prefix_dir)/lib/; fi
endef

View file

@ -0,0 +1,21 @@
package=native_comparisontool
$(package)_version=8c6666f
$(package)_download_path=https://github.com/theuni/bitcoind-comparisontool/raw/master
$(package)_file_name=pull-tests-$($(package)_version).jar
$(package)_sha256_hash=a865332b3827abcde684ab79f5f43c083b0b6a4c97ff5508c79f29fee24f11cd
$(package)_install_dirname=BitcoindComparisonTool_jar
$(package)_install_filename=BitcoindComparisonTool.jar
define $(package)_extract_cmds
endef
define $(package)_configure_cmds
endef
define $(package)_build_cmds
endef
define $(package)_stage_cmds
mkdir -p $($(package)_staging_prefix_dir)/share/$($(package)_install_dirname) && \
cp $($(package)_source) $($(package)_staging_prefix_dir)/share/$($(package)_install_dirname)/$($(package)_install_filename)
endef

View file

@ -1,8 +1,9 @@
package=native_ds_store
$(package)_version=1.1.2
$(package)_download_path=https://github.com/al45tair/ds_store/archive/
$(package)_file_name=v$($(package)_version).tar.gz
$(package)_sha256_hash=3b3ecb7bf0a5157f5b6010bc3af7c141fb0ad3527084e63336220d22744bc20c
$(package)_version=c80c23706eae
$(package)_download_path=https://bitbucket.org/al45tair/ds_store/get
$(package)_download_file=$($(package)_version).tar.bz2
$(package)_file_name=$(package)-$($(package)_version).tar.bz2
$(package)_sha256_hash=ce1aa412211610c63d567bbe3e06213006a2d5ba5d76d89399c151b5472cb0da
$(package)_install_libdir=$(build_prefix)/lib/python/dist-packages
$(package)_dependencies=native_biplist

View file

@ -1,8 +1,9 @@
package=native_mac_alias
$(package)_version=2.0.7
$(package)_download_path=https://github.com/al45tair/mac_alias/archive/
$(package)_file_name=v$($(package)_version).tar.gz
$(package)_sha256_hash=6f606d3b6bccd2112aeabf1a063f5b5ece87005a5d7e97c8faca23b916e88838
$(package)_version=1.1.0
$(package)_download_path=https://bitbucket.org/al45tair/mac_alias/get
$(package)_download_file=v$($(package)_version).tar.bz2
$(package)_file_name=$(package)-$($(package)_version).tar.bz2
$(package)_sha256_hash=87ad827e66790028361e43fc754f68ed041a9bdb214cca03c853f079b04fb120
$(package)_install_libdir=$(build_prefix)/lib/python/dist-packages
define $(package)_build_cmds

View file

@ -5,54 +5,19 @@ $(package)_file_name=$(package)-$($(package)_version).tar.gz
$(package)_sha256_hash=8f9faeaebad088e772f4ef5e38252d472be4d878c6b3a2718c10a4fcebe7a41c
define $(package)_set_vars
$(package)_config_env=AR="$($(package)_ar)" RANLIB="$($(package)_ranlib)" CC="$($(package)_cc) $($(package)_cflags) $($(package)_cppflags)"
$(package)_config_opts=--prefix=$(host_prefix) --openssldir=$(host_prefix)/etc/openssl
$(package)_config_opts+=no-camellia
$(package)_config_opts+=no-capieng
$(package)_config_opts+=no-cast
$(package)_config_opts+=no-comp
$(package)_config_opts+=no-dso
$(package)_config_opts+=no-dtls1
$(package)_config_opts+=no-ec_nistp_64_gcc_128
$(package)_config_opts+=no-gost
$(package)_config_opts+=no-gmp
$(package)_config_opts+=no-heartbeats
$(package)_config_opts+=no-idea
$(package)_config_opts+=no-jpake
$(package)_config_opts+=no-krb5
$(package)_config_opts+=no-libunbound
$(package)_config_opts+=no-md2
$(package)_config_opts+=no-mdc2
$(package)_config_opts+=no-rc4
$(package)_config_opts+=no-rc5
$(package)_config_opts+=no-rdrand
$(package)_config_opts+=no-rfc3779
$(package)_config_opts+=no-rsax
$(package)_config_opts+=no-sctp
$(package)_config_opts+=no-seed
$(package)_config_opts+=no-sha0
$(package)_config_opts+=no-shared
$(package)_config_opts+=no-ssl-trace
$(package)_config_opts+=no-ssl2
$(package)_config_opts+=no-ssl3
$(package)_config_opts+=no-static_engine
$(package)_config_opts+=no-store
$(package)_config_opts+=no-unit-test
$(package)_config_opts+=no-weak-ssl-ciphers
$(package)_config_opts+=no-whirlpool
$(package)_config_opts+=no-zlib
$(package)_config_opts+=no-zlib-dynamic
$(package)_config_env=AR="$($(package)_ar)" RANLIB="$($(package)_ranlib)" CC="$($(package)_cc)"
$(package)_config_opts=--prefix=$(host_prefix) --openssldir=$(host_prefix)/etc/openssl no-zlib no-shared no-dso
$(package)_config_opts+=no-krb5 no-camellia no-capieng no-cast no-cms no-dtls1 no-gost no-gmp no-heartbeats no-idea no-jpake no-md2
$(package)_config_opts+=no-mdc2 no-rc5 no-rdrand no-rfc3779 no-rsax no-sctp no-seed no-sha0 no-static_engine no-whirlpool no-rc2 no-rc4 no-ssl2 no-ssl3
$(package)_config_opts+=$($(package)_cflags) $($(package)_cppflags)
$(package)_config_opts_linux=-fPIC -Wa,--noexecstack
$(package)_config_opts_x86_64_linux=linux-x86_64
$(package)_config_opts_i686_linux=linux-generic32
$(package)_config_opts_arm_linux=linux-generic32
$(package)_config_opts_armv7l_linux=linux-generic32
$(package)_config_opts_aarch64_linux=linux-generic64
$(package)_config_opts_mipsel_linux=linux-generic32
$(package)_config_opts_mips_linux=linux-generic32
$(package)_config_opts_powerpc_linux=linux-generic32
$(package)_config_opts_riscv32_linux=linux-generic32
$(package)_config_opts_riscv64_linux=linux-generic64
$(package)_config_opts_x86_64_darwin=darwin64-x86_64-cc
$(package)_config_opts_x86_64_mingw32=mingw64
$(package)_config_opts_i686_mingw32=mingw

View file

@ -0,0 +1 @@
packages:=icu
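
This one-line package list pairs with the `ICU_ONLY` switch added to the depends Makefile above: when `ICU_ONLY` is set, only ICU is built, and its staged install can then be fed back in through `ICU_DIR` for the packages (such as boost) that link against it. A sketch of that two-pass flow:
```
# Hypothetical two-pass flow; the ICU_DIR value is a placeholder for wherever the
# staged ICU install ends up on your machine.
make -C depends ICU_ONLY=1 HOST=x86_64-w64-mingw32
make -C depends HOST=x86_64-w64-mingw32 ICU_DIR=/path/to/staged/icu
```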

View file

@ -1,10 +1,12 @@
packages:=icu boost openssl libevent zeromq
packages:=boost openssl libevent
darwin_packages:=zeromq
linux_packages:=zeromq
native_packages := native_ccache native_comparisontool
qt_native_packages = native_protobuf
qt_packages = qrencode protobuf zlib
qt_linux_packages:=qt expat dbus libxcb xcb_proto libXau xproto freetype fontconfig libX11 xextproto libXext xtrans
qt_packages = qrencode protobuf
qt_linux_packages= qt expat dbus libxcb xcb_proto libXau xproto freetype fontconfig libX11 xextproto libXext xtrans
qt_darwin_packages=qt
qt_mingw32_packages=qt
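
These lists feed the `qt_packages_$(NO_QT)` / `wallet_packages_$(NO_WALLET)` / `upnp_packages_$(NO_UPNP)` indirection in the depends Makefile shown earlier: setting, say, `NO_QT=1` turns the assignment into `qt_packages_1`, which nothing ever reads, so that package group is simply skipped. For example (a sketch):
```
# Hypothetical: skip the Qt and UPnP dependency groups for a native Linux host.
make -C depends NO_QT=1 NO_UPNP=1 HOST=x86_64-unknown-linux-gnu
```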

View file

@ -1,7 +1,7 @@
package=qrencode
$(package)_version=3.4.4
$(package)_download_path=https://fukuchi.org/works/qrencode/
$(package)_file_name=$(package)-$($(package)_version).tar.bz2
$(package)_file_name=qrencode-$(qrencode_version).tar.bz2
$(package)_sha256_hash=efe5188b1ddbcbf98763b819b146be6a90481aac30cfc8d858ab78a19cde1fa5
define $(package)_set_vars

View file

@ -1,50 +1,42 @@
PACKAGE=qt
$(package)_version=5.9.6
$(package)_download_path=https://download.qt.io/official_releases/qt/5.9/$($(package)_version)/submodules
$(package)_suffix=opensource-src-$($(package)_version).tar.xz
#$(package)_version=5.12.3
#$(package)_download_path=http://download.qt.io/official_releases/qt/5.12/$($(package)_version)/submodules
#$(package)_suffix=opensource-src-$($(package)_version).tar.gz
$(package)_version=5.5.0
$(package)_download_path=http://download.qt.io/official_releases/qt/5.5/$($(package)_version)/submodules
$(package)_suffix=opensource-src-$($(package)_version).tar.gz
$(package)_file_name=qtbase-$($(package)_suffix)
$(package)_sha256_hash=eed620cb268b199bd83b3fc6a471c51d51e1dc2dbb5374fc97a0cc75facbe36f
$(package)_dependencies=openssl zlib
$(package)_linux_dependencies=freetype fontconfig libxcb libX11 xproto libXext
$(package)_sha256_hash=7e82b1318f88e56a2a9376e069aa608d4fd96b48cb0e1b880ae658b0a1af0561
$(package)_dependencies=openssl
$(package)_linux_dependencies=freetype fontconfig dbus libxcb libX11 xproto libXext
$(package)_build_subdir=qtbase
$(package)_qt_libs=corelib network widgets gui plugins testlib
$(package)_patches=fix_qt_pkgconfig.patch mac-qmake.conf fix_configure_mac.patch fix_no_printer.patch fix_rcc_determinism.patch xkb-default.patch
$(package)_patches=mac-qmake.conf fix-xcb-include-order.patch mingw-uuidof.patch pidlist_absolute.patch
$(package)_qttranslations_file_name=qttranslations-$($(package)_suffix)
$(package)_qttranslations_sha256_hash=9822084f8e2d2939ba39f4af4c0c2320e45d5996762a9423f833055607604ed8
$(package)_qttranslations_sha256_hash=c4bd6db6e426965c6f8824c54e81f68bbd61e2bae1bcadc328c6e81c45902a0d
$(package)_qttools_file_name=qttools-$($(package)_suffix)
$(package)_qttools_sha256_hash=50e75417ec0c74bb8b1989d1d8e981ee83690dce7dfc0c2169f7c00f397e5117
$(package)_qttools_sha256_hash=d9e06bd19ecc86afba5e95d45a906d1bc1ad579aa70001e36143c1aaf695bdd6
$(package)_extra_sources = $($(package)_qttranslations_file_name)
$(package)_extra_sources += $($(package)_qttools_file_name)
define $(package)_set_vars
$(package)_config_opts_release = -release
$(package)_config_opts_debug = -debug
$(package)_config_opts += -bindir $(build_prefix)/bin
$(package)_config_opts += -c++std c++11
$(package)_config_opts += -confirm-license
$(package)_config_opts += -dbus-runtime
$(package)_config_opts += -hostprefix $(build_prefix)
$(package)_config_opts += -no-cups
$(package)_config_opts += -no-egl
$(package)_config_opts += -no-eglfs
$(package)_config_opts += -no-freetype
$(package)_config_opts += -no-gif
$(package)_config_opts_debug = -debug
$(package)_config_opts += -opensource -confirm-license
$(package)_config_opts += -no-audio-backend
$(package)_config_opts += -no-glib
$(package)_config_opts += -no-icu
$(package)_config_opts += -no-cups
$(package)_config_opts += -no-iconv
$(package)_config_opts += -no-kms
$(package)_config_opts += -no-linuxfb
$(package)_config_opts += -no-libudev
$(package)_config_opts += -no-mtdev
$(package)_config_opts += -no-openvg
$(package)_config_opts += -no-reduce-relocations
$(package)_config_opts += -no-gif
$(package)_config_opts += -no-freetype
$(package)_config_opts += -no-nis
$(package)_config_opts += -no-pch
$(package)_config_opts += -no-qml-debug
$(package)_config_opts += -nomake examples
$(package)_config_opts += -nomake tests
$(package)_config_opts += -no-feature-style-windowsmobile
$(package)_config_opts += -no-feature-style-windowsce
$(package)_config_opts += -no-sql-db2
$(package)_config_opts += -no-sql-ibase
$(package)_config_opts += -no-sql-oci
@ -54,27 +46,36 @@ $(package)_config_opts += -no-sql-odbc
$(package)_config_opts += -no-sql-psql
$(package)_config_opts += -no-sql-sqlite
$(package)_config_opts += -no-sql-sqlite2
$(package)_config_opts += -no-use-gold-linker
$(package)_config_opts += -no-xinput2
$(package)_config_opts += -nomake examples
$(package)_config_opts += -nomake tests
$(package)_config_opts += -opensource
$(package)_config_opts += -openssl-linked
$(package)_config_opts += -optimized-qmake
$(package)_config_opts += -pch
$(package)_config_opts += -pkg-config
$(package)_config_opts += -prefix $(host_prefix)
$(package)_config_opts += -qt-libpng
$(package)_config_opts += -qt-libjpeg
$(package)_config_opts += -qt-pcre
$(package)_config_opts += -system-zlib
$(package)_config_opts += -hostprefix $(build_prefix)
$(package)_config_opts += -bindir $(build_prefix)/bin
$(package)_config_opts += -no-c++11
$(package)_config_opts += -openssl-linked
$(package)_config_opts += -v
$(package)_config_opts += -static
$(package)_config_opts += -silent
$(package)_config_opts += -v
$(package)_config_opts += -no-feature-printer
$(package)_config_opts += -no-feature-printdialog
$(package)_config_opts += -no-feature-concurrent
$(package)_config_opts += -no-feature-xml
$(package)_config_opts += -pkg-config
$(package)_config_opts += -qt-libpng
$(package)_config_opts += -qt-libjpeg
$(package)_config_opts += -qt-zlib
$(package)_config_opts += -qt-pcre
$(package)_config_opts += -no-pulseaudio
$(package)_config_opts += -no-openvg
$(package)_config_opts += -no-xrender
$(package)_config_opts += -no-alsa
$(package)_config_opts += -no-mtdev
$(package)_config_opts += -no-gstreamer
$(package)_config_opts += -no-mitshm
$(package)_config_opts += -no-kms
$(package)_config_opts += -no-reduce-relocations
$(package)_config_opts += -no-egl
$(package)_config_opts += -no-eglfs
$(package)_config_opts += -no-linuxfb
$(package)_config_opts += -no-xinput2
$(package)_config_opts += -no-libudev
$(package)_config_opts += -no-use-gold-linker
$(package)_config_opts += -reduce-exports
$(package)_config_opts += -optimized-qmake
ifneq ($(build_os),darwin)
$(package)_config_opts_darwin = -xplatform macx-clang-linux
@ -86,19 +87,16 @@ $(package)_config_opts_darwin += -device-option MAC_TARGET=$(host)
$(package)_config_opts_darwin += -device-option MAC_LD64_VERSION=$(LD64_VERSION)
endif
$(package)_config_opts_linux = -qt-xkbcommon-x11
$(package)_config_opts_linux = -qt-xkbcommon
$(package)_config_opts_linux += -qt-xcb
$(package)_config_opts_linux += -system-freetype
$(package)_config_opts_linux += -no-feature-sessionmanager
$(package)_config_opts_linux += -no-sm
$(package)_config_opts_linux += -fontconfig
$(package)_config_opts_linux += -no-opengl
$(package)_config_opts_arm_linux += -platform linux-g++ -xplatform bitcoin-linux-g++
$(package)_config_opts_arm_linux = -platform linux-g++ -xplatform $(host)
$(package)_config_opts_i686_linux = -xplatform linux-g++-32
$(package)_config_opts_x86_64_linux = -xplatform linux-g++-64
$(package)_config_opts_aarch64_linux = -xplatform linux-aarch64-gnu-g++
$(package)_config_opts_mingw32 = -no-opengl -xplatform win32-g++ -device-option CROSS_COMPILE="$(host)-"
$(package)_build_env = QT_RCC_TEST=1
$(package)_build_env += QT_RCC_SOURCE_DATE_OVERRIDE=1
endef
define $(package)_fetch_cmds
@ -122,33 +120,24 @@ define $(package)_extract_cmds
endef
define $(package)_preprocess_cmds
sed -i.old "s|FT_Get_Font_Format|FT_Get_X11_Font_Format|" qtbase/src/platformsupport/fontdatabases/freetype/qfontengine_ft.cpp && \
sed -i.old "s|updateqm.commands = \$$$$\$$$$LRELEASE|updateqm.commands = $($(package)_extract_dir)/qttools/bin/lrelease|" qttranslations/translations/translations.pro && \
sed -i.old "/updateqm.depends =/d" qttranslations/translations/translations.pro && \
sed -i.old "s/src_plugins.depends = src_sql src_network/src_plugins.depends = src_network/" qtbase/src/src.pro && \
sed -i.old "s/src_plugins.depends = src_sql src_xml src_network/src_plugins.depends = src_xml src_network/" qtbase/src/src.pro && \
sed -i.old "s|X11/extensions/XIproto.h|X11/X.h|" qtbase/src/plugins/platforms/xcb/qxcbxsettings.cpp && \
sed -i.old 's/if \[ "$$$$XPLATFORM_MAC" = "yes" \]; then xspecvals=$$$$(macSDKify/if \[ "$$$$BUILD_ON_MAC" = "yes" \]; then xspecvals=$$$$(macSDKify/' qtbase/configure && \
sed -i.old 's/CGEventCreateMouseEvent(0, kCGEventMouseMoved, pos, 0)/CGEventCreateMouseEvent(0, kCGEventMouseMoved, pos, kCGMouseButtonLeft)/' qtbase/src/plugins/platforms/cocoa/qcocoacursor.mm && \
mkdir -p qtbase/mkspecs/macx-clang-linux &&\
cp -f qtbase/mkspecs/macx-clang/Info.plist.lib qtbase/mkspecs/macx-clang-linux/ &&\
cp -f qtbase/mkspecs/macx-clang/Info.plist.app qtbase/mkspecs/macx-clang-linux/ &&\
cp -f qtbase/mkspecs/macx-clang/qplatformdefs.h qtbase/mkspecs/macx-clang-linux/ &&\
cp -f $($(package)_patch_dir)/mac-qmake.conf qtbase/mkspecs/macx-clang-linux/qmake.conf && \
cp -r qtbase/mkspecs/linux-arm-gnueabi-g++ qtbase/mkspecs/bitcoin-linux-g++ && \
sed -i.old "s/arm-linux-gnueabi-/$(host)-/g" qtbase/mkspecs/bitcoin-linux-g++/qmake.conf && \
patch -p1 -i $($(package)_patch_dir)/fix_qt_pkgconfig.patch &&\
patch -p1 -i $($(package)_patch_dir)/fix_configure_mac.patch &&\
patch -p1 -i $($(package)_patch_dir)/fix_no_printer.patch &&\
patch -p1 -i $($(package)_patch_dir)/fix_rcc_determinism.patch &&\
patch -p1 -i $($(package)_patch_dir)/xkb-default.patch &&\
echo "!host_build: QMAKE_CFLAGS += $($(package)_cflags) $($(package)_cppflags)" >> qtbase/mkspecs/common/gcc-base.conf && \
echo "!host_build: QMAKE_CXXFLAGS += $($(package)_cxxflags) $($(package)_cppflags)" >> qtbase/mkspecs/common/gcc-base.conf && \
echo "!host_build: QMAKE_LFLAGS += $($(package)_ldflags)" >> qtbase/mkspecs/common/gcc-base.conf && \
echo "QMAKE_LINK_OBJECT_MAX = 10" >> qtbase/mkspecs/win32-g++/qmake.conf &&\
echo "QMAKE_LINK_OBJECT_SCRIPT = object_script" >> qtbase/mkspecs/win32-g++/qmake.conf &&\
sed -i.old "s|QMAKE_CFLAGS = |!host_build: QMAKE_CFLAGS = $($(package)_cflags) $($(package)_cppflags) |" qtbase/mkspecs/win32-g++/qmake.conf && \
sed -i.old "s|QMAKE_LFLAGS = |!host_build: QMAKE_LFLAGS = $($(package)_ldflags) |" qtbase/mkspecs/win32-g++/qmake.conf && \
sed -i.old "s|QMAKE_CXXFLAGS = |!host_build: QMAKE_CXXFLAGS = $($(package)_cxxflags) $($(package)_cppflags) |" qtbase/mkspecs/win32-g++/qmake.conf
patch -p1 < $($(package)_patch_dir)/fix-xcb-include-order.patch && \
patch -p1 < $($(package)_patch_dir)/mingw-uuidof.patch && \
patch -p1 < $($(package)_patch_dir)/pidlist_absolute.patch && \
echo "QMAKE_CFLAGS += $($(package)_cflags) $($(package)_cppflags)" >> qtbase/mkspecs/common/gcc-base.conf && \
echo "QMAKE_CXXFLAGS += $($(package)_cxxflags) $($(package)_cppflags)" >> qtbase/mkspecs/common/gcc-base.conf && \
echo "QMAKE_LFLAGS += $($(package)_ldflags)" >> qtbase/mkspecs/common/gcc-base.conf && \
sed -i.old "s|QMAKE_CFLAGS = |QMAKE_CFLAGS = $($(package)_cflags) $($(package)_cppflags) |" qtbase/mkspecs/win32-g++/qmake.conf && \
sed -i.old "s|QMAKE_LFLAGS = |QMAKE_LFLAGS = $($(package)_ldflags) |" qtbase/mkspecs/win32-g++/qmake.conf && \
sed -i.old "s|QMAKE_CXXFLAGS = |QMAKE_CXXFLAGS = $($(package)_cxxflags) $($(package)_cppflags) |" qtbase/mkspecs/win32-g++/qmake.conf
endef
define $(package)_config_cmds
@ -156,26 +145,21 @@ define $(package)_config_cmds
export PKG_CONFIG_LIBDIR=$(host_prefix)/lib/pkgconfig && \
export PKG_CONFIG_PATH=$(host_prefix)/share/pkgconfig && \
./configure $($(package)_config_opts) && \
echo "host_build: QT_CONFIG ~= s/system-zlib/zlib" >> mkspecs/qconfig.pri && \
echo "CONFIG += force_bootstrap" >> mkspecs/qconfig.pri && \
$(MAKE) sub-src-clean && \
cd ../qttranslations && ../qtbase/bin/qmake qttranslations.pro -o Makefile && \
cd translations && ../../qtbase/bin/qmake translations.pro -o Makefile && cd ../.. && \
cd qttools/src/linguist/lrelease/ && ../../../../qtbase/bin/qmake lrelease.pro -o Makefile && \
cd ../lupdate/ && ../../../../qtbase/bin/qmake lupdate.pro -o Makefile && cd ../../../..
cd translations && ../../qtbase/bin/qmake translations.pro -o Makefile && cd ../.. &&\
cd qttools/src/linguist/lrelease/ && ../../../../qtbase/bin/qmake lrelease.pro -o Makefile
endef
define $(package)_build_cmds
$(MAKE) -C src $(addprefix sub-,$($(package)_qt_libs)) && \
$(MAKE) -C ../qttools/src/linguist/lrelease && \
$(MAKE) -C ../qttools/src/linguist/lupdate && \
$(MAKE) -C ../qttranslations
endef
define $(package)_stage_cmds
$(MAKE) -C src INSTALL_ROOT=$($(package)_staging_dir) $(addsuffix -install_subtargets,$(addprefix sub-,$($(package)_qt_libs))) && cd .. && \
$(MAKE) -C src INSTALL_ROOT=$($(package)_staging_dir) $(addsuffix -install_subtargets,$(addprefix sub-,$($(package)_qt_libs))) && cd .. &&\
$(MAKE) -C qttools/src/linguist/lrelease INSTALL_ROOT=$($(package)_staging_dir) install_target && \
$(MAKE) -C qttools/src/linguist/lupdate INSTALL_ROOT=$($(package)_staging_dir) install_target && \
$(MAKE) -C qttranslations INSTALL_ROOT=$($(package)_staging_dir) install_subtargets && \
if `test -f qtbase/src/plugins/platforms/xcb/xcb-static/libxcb-static.a`; then \
cp qtbase/src/plugins/platforms/xcb/xcb-static/libxcb-static.a $($(package)_staging_prefix_dir)/lib; \

66
depends/packages/qt46.mk Normal file
View file

@ -0,0 +1,66 @@
PACKAGE=qt46
$(package)_version=4.6.4
$(package)_download_path=http://download.qt-project.org/archive/qt/4.6/
$(package)_file_name=qt-everywhere-opensource-src-$($(package)_version).tar.gz
$(package)_sha256_hash=9ad4d46c721b53a429ed5a2eecfd3c239a9ab566562f183f99d3125f1a234250
$(package)_dependencies=openssl freetype dbus libX11 xproto libXext libICE libSM
$(package)_patches=stlfix.patch
define $(package)_set_vars
$(package)_config_opts = -prefix $(host_prefix) -headerdir $(host_prefix)/include/qt4 -bindir $(build_prefix)/bin
$(package)_config_opts += -release -no-separate-debug-info -opensource -confirm-license
$(package)_config_opts += -stl -qt-zlib
$(package)_config_opts += -nomake examples -nomake tests -nomake tools -nomake translations -nomake demos -nomake docs
$(package)_config_opts += -no-audio-backend -no-glib -no-nis -no-cups -no-iconv -no-gif -no-pch
$(package)_config_opts += -no-xkb -no-xrender -no-xrandr -no-xfixes -no-xcursor -no-xinerama -no-xsync -no-xinput -no-mitshm -no-xshape
$(package)_config_opts += -no-libtiff -no-fontconfig -openssl-linked
$(package)_config_opts += -no-sql-db2 -no-sql-ibase -no-sql-oci -no-sql-tds -no-sql-mysql
$(package)_config_opts += -no-sql-odbc -no-sql-psql -no-sql-sqlite -no-sql-sqlite2
$(package)_config_opts += -no-xmlpatterns -no-multimedia -no-phonon -no-scripttools -no-declarative
$(package)_config_opts += -no-phonon-backend -no-webkit -no-javascript-jit -no-script
$(package)_config_opts += -no-svg -no-libjpeg -no-libtiff -no-libpng -no-libmng -no-qt3support -no-opengl
$(package)_config_opts_x86_64_linux += -platform linux-g++-64
$(package)_config_opts_i686_linux = -platform linux-g++-32
$(package)_build_env = QT_RCC_TEST=1
endef
define $(package)_preprocess_cmds
sed -i.old "s|/include /usr/include||" config.tests/unix/freetype/freetype.pri && \
sed -i.old "s|src_plugins.depends = src_gui src_sql src_svg|src_plugins.depends = src_gui src_sql|" src/src.pro && \
sed -i.old "s|\.lower(|\.toLower(|g" src/network/ssl/qsslsocket_openssl.cpp && \
sed -i.old "s|Key_BackSpace|Key_Backspace|" src/gui/itemviews/qabstractitemview.cpp && \
sed -i.old "s|/usr/X11R6/lib64|$(host_prefix)/lib|" mkspecs/*/*.conf && \
sed -i.old "s|/usr/X11R6/lib|$(host_prefix)/lib|" mkspecs/*/*.conf && \
sed -i.old "s|/usr/X11R6/include|$(host_prefix)/include|" mkspecs/*/*.conf && \
sed -i.old "s|QMAKE_LFLAGS_SHLIB\t+= -shared|QMAKE_LFLAGS_SHLIB\t+= -shared -Wl,--exclude-libs,ALL|" mkspecs/common/g++.conf && \
sed -i.old "/SSLv2_client_method/d" src/network/ssl/qsslsocket_openssl.cpp src/network/ssl/qsslsocket_openssl_symbols.cpp && \
sed -i.old "/SSLv2_server_method/d" src/network/ssl/qsslsocket_openssl.cpp src/network/ssl/qsslsocket_openssl_symbols.cpp && \
patch -p1 < $($(package)_patch_dir)/stlfix.patch
endef
define $(package)_config_cmds
export PKG_CONFIG_SYSROOT_DIR=/ && \
export PKG_CONFIG_LIBDIR=$(host_prefix)/lib/pkgconfig && \
export PKG_CONFIG_PATH=$(host_prefix)/share/pkgconfig && \
export CPATH=$(host_prefix)/include && \
OPENSSL_LIBS='-L$(host_prefix)/lib -lssl -lcrypto' ./configure $($(package)_config_opts) && \
cd tools/linguist/lrelease; ../../../bin/qmake -o Makefile lrelease.pro
endef
define $(package)_build_cmds
export CPATH=$(host_prefix)/include && \
$(MAKE) -C src && \
$(MAKE) -C tools/linguist/lrelease
endef
define $(package)_stage_cmds
$(MAKE) -C src INSTALL_ROOT=$($(package)_staging_dir) install && \
$(MAKE) -C tools/linguist/lrelease INSTALL_ROOT=$($(package)_staging_dir) install
endef
define $(package)_postprocess_cmds
rm -rf mkspecs/ lib/cmake/ lib/*.prl lib/*.la && \
find native/bin -type f -exec mv {} {}-qt4 \;
endef

View file

@ -4,10 +4,6 @@ $(package)_download_path=http://xorg.freedesktop.org/releases/individual/proto
$(package)_file_name=$(package)-$($(package)_version).tar.bz2
$(package)_sha256_hash=f3f4b23ac8db9c3a9e0d8edb591713f3d70ef9c3b175970dd8823dfc92aa5bb0
define $(package)_preprocess_cmds
cp -f $(BASEDIR)/config.guess $(BASEDIR)/config.sub .
endef
define $(package)_set_vars
$(package)_config_opts=--disable-shared
endef

View file

@ -1,36 +1,26 @@
package=zeromq
$(package)_version=4.3.1
$(package)_download_path=https://github.com/zeromq/libzmq/releases/download/v$($(package)_version)/
$(package)_version=4.0.7
$(package)_download_path=http://download.zeromq.org
$(package)_file_name=$(package)-$($(package)_version).tar.gz
$(package)_sha256_hash=bcbabe1e2c7d0eec4ed612e10b94b112dd5f06fcefa994a0c79a45d835cd21eb
$(package)_patches=0001-fix-build-with-older-mingw64.patch 0002-disable-pthread_set_name_np.patch
$(package)_sha256_hash=e00b2967e074990d0538361cc79084a0a92892df2c6e7585da34e4c61ee47b03
define $(package)_set_vars
$(package)_config_opts=--without-docs --disable-shared --without-libsodium --disable-curve --disable-curve-keygen --disable-perf --disable-Werror --disable-drafts
$(package)_config_opts += --without-libsodium --without-libgssapi_krb5 --without-pgm --without-norm --without-vmci
$(package)_config_opts += --disable-libunwind --disable-radix-tree --without-gcov
$(package)_config_opts=--without-documentation --disable-shared
$(package)_config_opts_linux=--with-pic
endef
define $(package)_preprocess_cmds
patch -p1 < $($(package)_patch_dir)/0001-fix-build-with-older-mingw64.patch && \
patch -p1 < $($(package)_patch_dir)/0002-disable-pthread_set_name_np.patch && \
cp -f $(BASEDIR)/config.guess $(BASEDIR)/config.sub config
endef
define $(package)_config_cmds
$($(package)_autoconf)
endef
define $(package)_build_cmds
$(MAKE) src/libzmq.la
$(MAKE) -C src
endef
define $(package)_stage_cmds
$(MAKE) DESTDIR=$($(package)_staging_dir) install-libLTLIBRARIES install-includeHEADERS install-pkgconfigDATA
$(MAKE) -C src DESTDIR=$($(package)_staging_dir) install
endef
define $(package)_postprocess_cmds
sed -i.old "s/ -lstdc++//" lib/pkgconfig/libzmq.pc && \
rm -rf bin share lib/*.la
rm -rf bin share
endef

View file

@ -1,27 +0,0 @@
package=zlib
$(package)_version=1.2.11
$(package)_download_path=http://www.zlib.net
$(package)_file_name=$(package)-$($(package)_version).tar.gz
$(package)_sha256_hash=c3e5e9fdd5004dcb542feda5ee4f0ff0744628baf8ed2dd5d66f8ca1197cb1a1
define $(package)_set_vars
$(package)_build_opts= CC="$($(package)_cc)"
$(package)_build_opts+=CFLAGS="$($(package)_cflags) $($(package)_cppflags) -fPIC"
$(package)_build_opts+=RANLIB="$($(package)_ranlib)"
$(package)_build_opts+=AR="$($(package)_ar)"
$(package)_build_opts_darwin+=AR="$($(package)_libtool)"
$(package)_build_opts_darwin+=ARFLAGS="-o"
endef
define $(package)_config_cmds
./configure --static --prefix=$(host_prefix)
endef
define $(package)_build_cmds
$(MAKE) $($(package)_build_opts) libz.a
endef
define $(package)_stage_cmds
$(MAKE) DESTDIR=$($(package)_staging_dir) install $($(package)_build_opts)
endef

View file

@ -0,0 +1,21 @@
--- old/evutil.c 2015-08-28 19:26:23.488765923 -0400
+++ new/evutil.c 2015-08-28 19:27:41.392767019 -0400
@@ -321,15 +321,16 @@
int
evutil_make_listen_socket_reuseable(evutil_socket_t sock)
{
-#ifndef WIN32
int one = 1;
+#ifndef WIN32
/* REUSEADDR on Unix means, "don't hang on to this address after the
* listener is closed." On Windows, though, it means "don't keep other
* processes from binding to this address while we're using it. */
return setsockopt(sock, SOL_SOCKET, SO_REUSEADDR, (void*) &one,
(ev_socklen_t)sizeof(one));
#else
- return 0;
+ return setsockopt(sock, SOL_SOCKET, SO_REUSEADDR, (const char*) &one,
+ (ev_socklen_t)sizeof(one));
#endif
}

View file

@ -0,0 +1,45 @@
--- old/qtbase/src/plugins/platforms/xcb/xcb_qpa_lib.pro 2015-03-17 02:06:42.705930685 +0000
+++ new/qtbase/src/plugins/platforms/xcb/xcb_qpa_lib.pro 2015-03-17 02:08:41.281926351 +0000
@@ -94,8 +94,6 @@
DEFINES += $$QMAKE_DEFINES_XCB
LIBS += $$QMAKE_LIBS_XCB
-QMAKE_CXXFLAGS += $$QMAKE_CFLAGS_XCB
-QMAKE_CFLAGS += $$QMAKE_CFLAGS_XCB
CONFIG += qpa/genericunixfontdatabase
@@ -104,7 +102,8 @@
contains(QT_CONFIG, xcb-qt) {
DEFINES += XCB_USE_RENDER
XCB_DIR = ../../../3rdparty/xcb
- INCLUDEPATH += $$XCB_DIR/include $$XCB_DIR/sysinclude
+ QMAKE_CFLAGS += -I$$XCB_DIR/include -I$$XCB_DIR/sysinclude $$QMAKE_CFLAGS_XCB
+ QMAKE_CXXFLAGS += -I$$XCB_DIR/include -I$$XCB_DIR/sysinclude $$QMAKE_CFLAGS_XCB
LIBS += -lxcb -L$$OUT_PWD/xcb-static -lxcb-static
} else {
LIBS += -lxcb -lxcb-image -lxcb-icccm -lxcb-sync -lxcb-xfixes -lxcb-shm -lxcb-randr -lxcb-shape -lxcb-keysyms
--- old/qtbase/src/plugins/platforms/xcb/xcb-static/xcb-static.pro 2015-03-17 02:07:04.641929383 +0000
+++ new/qtbase/src/plugins/platforms/xcb/xcb-static/xcb-static.pro 2015-03-17 02:10:15.485922059 +0000
@@ -8,7 +8,8 @@
XCB_DIR = ../../../../3rdparty/xcb
-INCLUDEPATH += $$XCB_DIR/include $$XCB_DIR/include/xcb $$XCB_DIR/sysinclude
+QMAKE_CFLAGS += -I$$XCB_DIR/include -I$$XCB_DIR/include/xcb -I$$XCB_DIR/sysinclude
+QMAKE_CXXFLAGS += -I$$XCB_DIR/include -I$$XCB_DIR/include/xcb -I$$XCB_DIR/sysinclude
QMAKE_CXXFLAGS += $$QMAKE_CFLAGS_XCB
QMAKE_CFLAGS += $$QMAKE_CFLAGS_XCB
--- old/qtbase/src/plugins/platforms/xcb/xcb-plugin.pro 2015-07-24 16:02:59.530038830 -0400
+++ new/qtbase/src/plugins/platforms/xcb/xcb-plugin.pro 2015-07-24 16:01:22.106037459 -0400
@@ -11,3 +11,9 @@
qxcbmain.cpp
OTHER_FILES += xcb.json README
+contains(QT_CONFIG, xcb-qt) {
+ DEFINES += XCB_USE_RENDER
+ XCB_DIR = ../../../3rdparty/xcb
+ QMAKE_CFLAGS += -I$$XCB_DIR/include -I$$XCB_DIR/sysinclude $$QMAKE_CFLAGS_XCB
+ QMAKE_CXXFLAGS += -I$$XCB_DIR/include -I$$XCB_DIR/sysinclude $$QMAKE_CFLAGS_XCB
+}

View file

@ -1,50 +0,0 @@
--- old/qtbase/mkspecs/features/mac/sdk.prf 2018-02-08 10:24:48.000000000 -0800
+++ new/qtbase/mkspecs/features/mac/sdk.prf 2018-03-23 10:38:56.000000000 -0700
@@ -8,21 +8,21 @@
defineReplace(xcodeSDKInfo) {
info = $$1
equals(info, "Path"): \
- info = --show-sdk-path
+ infoarg = --show-sdk-path
equals(info, "PlatformPath"): \
- info = --show-sdk-platform-path
+ infoarg = --show-sdk-platform-path
equals(info, "SDKVersion"): \
- info = --show-sdk-version
+ infoarg = --show-sdk-version
sdk = $$2
isEmpty(sdk): \
sdk = $$QMAKE_MAC_SDK
isEmpty(QMAKE_MAC_SDK.$${sdk}.$${info}) {
- QMAKE_MAC_SDK.$${sdk}.$${info} = $$system("/usr/bin/xcrun --sdk $$sdk $$info 2>/dev/null")
+ QMAKE_MAC_SDK.$${sdk}.$${info} = $$system("/usr/bin/xcrun --sdk $$sdk $$infoarg 2>/dev/null")
# --show-sdk-platform-path won't work for Command Line Tools; this is fine
# only used by the XCTest backend to testlib
- isEmpty(QMAKE_MAC_SDK.$${sdk}.$${info}):if(!isEmpty(QMAKE_XCODEBUILD_PATH)|!equals(info, "--show-sdk-platform-path")): \
- error("Could not resolve SDK $$info for \'$$sdk\'")
+ isEmpty(QMAKE_MAC_SDK.$${sdk}.$${info}):if(!isEmpty(QMAKE_XCODEBUILD_PATH)|!equals(infoarg, "--show-sdk-platform-path")): \
+ error("Could not resolve SDK $$info for \'$$sdk\' using $$infoarg")
cache(QMAKE_MAC_SDK.$${sdk}.$${info}, set stash, QMAKE_MAC_SDK.$${sdk}.$${info})
}
--- old/qtbase/configure 2018-02-08 10:24:48.000000000 -0800
+++ new/qtbase/configure 2018-03-23 05:42:29.000000000 -0700
@@ -232,8 +232,13 @@
sdk=$(getSingleQMakeVariable "QMAKE_MAC_SDK" "$1")
if [ -z "$sdk" ]; then echo "QMAKE_MAC_SDK must be set when building on Mac" >&2; exit 1; fi
- sysroot=$(/usr/bin/xcrun --sdk $sdk --show-sdk-path 2>/dev/null)
- if [ -z "$sysroot" ]; then echo "Failed to resolve SDK path for '$sdk'" >&2; exit 1; fi
+ sysroot=$(getSingleQMakeVariable "QMAKE_MAC_SDK_PATH" "$1")
+
+ echo "sysroot pre-configured as $sysroot";
+ if [ -z "$sysroot" ]; then
+ sysroot=$(/usr/bin/xcrun --sdk $sdk --show-sdk-path 2>/dev/null)
+ if [ -z "$sysroot" ]; then echo "Failed to resolve SDK path for '$sdk'" >&2; exit 1; fi
+ fi
case "$sdk" in
macosx*)

View file

@ -1,19 +0,0 @@
--- x/qtbase/src/plugins/platforms/cocoa/qprintengine_mac_p.h
+++ y/qtbase/src/plugins/platforms/cocoa/qprintengine_mac_p.h
@@ -52,6 +52,7 @@
//
#include <QtCore/qglobal.h>
+#include <qpa/qplatformprintdevice.h>
#ifndef QT_NO_PRINTER
--- x/qtbase/src/plugins/plugins.pro
+++ y/qtbase/src/plugins/plugins.pro
@@ -8,6 +8,3 @@ qtHaveModule(gui) {
qtConfig(imageformatplugin): SUBDIRS *= imageformats
!android:qtConfig(library): SUBDIRS *= generic
}
-
-!winrt:qtHaveModule(printsupport): \
- SUBDIRS += printsupport

View file

@ -1,11 +0,0 @@
--- old/qtbase/mkspecs/features/qt_module.prf
+++ new/qtbase/mkspecs/features/qt_module.prf
@@ -245,7 +245,7 @@
load(qt_targets)
# this builds on top of qt_common
-!internal_module:!lib_bundle:if(unix|mingw) {
+unix|mingw {
CONFIG += create_pc
QMAKE_PKGCONFIG_DESTDIR = pkgconfig
host_build: \

View file

@ -1,15 +0,0 @@
--- old/qtbase/src/tools/rcc/rcc.cpp
+++ new/qtbase/src/tools/rcc/rcc.cpp
@@ -207,7 +207,11 @@ void RCCFileInfo::writeDataInfo(RCCResourceLibrary &lib)
if (lib.formatVersion() >= 2) {
// last modified time stamp
const QDateTime lastModified = m_fileInfo.lastModified();
- lib.writeNumber8(quint64(lastModified.isValid() ? lastModified.toMSecsSinceEpoch() : 0));
+ quint64 lastmod = quint64(lastModified.isValid() ? lastModified.toMSecsSinceEpoch() : 0);
+ static const quint64 sourceDate = 1000 * qgetenv("QT_RCC_SOURCE_DATE_OVERRIDE").toULongLong();
+ if (sourceDate != 0)
+ lastmod = sourceDate;
+ lib.writeNumber8(lastmod);
if (text || pass1)
lib.writeChar('\n');
}

View file

@ -1,5 +1,6 @@
MAKEFILE_GENERATOR = UNIX
CONFIG += app_bundle incremental global_init_link_order lib_version_first plugin_no_soname absolute_library_soname
DEFINES += QT_NO_PRINTER QT_NO_PRINTDIALOG
QMAKE_INCREMENTAL_STYLE = sublib
include(../common/macx.conf)
include(../common/gcc-base-mac.conf)
@ -10,15 +11,14 @@ QMAKE_XCODE_VERSION=4.3
QMAKE_XCODE_DEVELOPER_PATH=/Developer
QMAKE_MACOSX_DEPLOYMENT_TARGET = $${MAC_MIN_VERSION}
QMAKE_MAC_SDK=macosx
QMAKE_MAC_SDK.macosx.Path = $${MAC_SDK_PATH}
QMAKE_MAC_SDK.macosx.path = $${MAC_SDK_PATH}
QMAKE_MAC_SDK.macosx.platform_name = macosx
QMAKE_MAC_SDK.macosx.SDKVersion = $${MAC_SDK_VERSION}
QMAKE_MAC_SDK.macosx.PlatformPath = /phony
QMAKE_APPLE_DEVICE_ARCHS=x86_64
!host_build: QMAKE_CFLAGS += -target $${MAC_TARGET}
!host_build: QMAKE_OBJECTIVE_CFLAGS += $$QMAKE_CFLAGS
!host_build: QMAKE_CXXFLAGS += $$QMAKE_CFLAGS
!host_build: QMAKE_LFLAGS += -target $${MAC_TARGET} -mlinker-version=$${MAC_LD64_VERSION}
QMAKE_MAC_SDK.macosx.version = $${MAC_SDK_VERSION}
QMAKE_MAC_SDK.macosx.platform_path = /phony
QMAKE_CFLAGS += -target $${MAC_TARGET}
QMAKE_OBJECTIVE_CFLAGS += $$QMAKE_CFLAGS
QMAKE_CXXFLAGS += $$QMAKE_CFLAGS
QMAKE_LFLAGS += -target $${MAC_TARGET} -mlinker-version=$${MAC_LD64_VERSION}
QMAKE_AR = $${CROSS_COMPILE}ar cq
QMAKE_RANLIB=$${CROSS_COMPILE}ranlib
QMAKE_LIBTOOL=$${CROSS_COMPILE}libtool

View file

@ -0,0 +1,44 @@
--- old/qtbase/src/plugins/platforms/windows/qwindowscontext.cpp 2015-06-20 17:40:20.956781548 -0400
+++ new/qtbase/src/plugins/platforms/windows/qwindowscontext.cpp 2015-06-20 17:29:32.052772416 -0400
@@ -69,7 +69,7 @@
#include <stdlib.h>
#include <stdio.h>
#include <windowsx.h>
-#ifndef Q_OS_WINCE
+#if !defined(Q_OS_WINCE) && (!defined(USE___UUIDOF) || (defined(USE___UUIDOF) && USE___UUIDOF == 1))
# include <comdef.h>
#endif
@@ -762,7 +762,7 @@
HWND_MESSAGE, NULL, (HINSTANCE)GetModuleHandle(0), NULL);
}
-#ifndef Q_OS_WINCE
+#if !defined(Q_OS_WINCE) && (!defined(USE___UUIDOF) || (defined(USE___UUIDOF) && USE___UUIDOF == 1))
// Re-engineered from the inline function _com_error::ErrorMessage().
// We cannot use it directly since it uses swprintf_s(), which is not
// present in the MSVCRT.DLL found on Windows XP (QTBUG-35617).
@@ -781,7 +781,7 @@
return QStringLiteral("IDispatch error #") + QString::number(wCode);
return QStringLiteral("Unknown error 0x0") + QString::number(comError.Error(), 16);
}
-#endif // !Q_OS_WINCE
+#endif // !defined(Q_OS_WINCE) && (!defined(USE___UUIDOF) || (defined(USE___UUIDOF) && USE___UUIDOF == 1))
/*!
\brief Common COM error strings.
@@ -846,12 +846,12 @@
default:
break;
}
-#ifndef Q_OS_WINCE
+#if !defined(Q_OS_WINCE) && (!defined(USE___UUIDOF) || (defined(USE___UUIDOF) && USE___UUIDOF == 1))
_com_error error(hr);
result += QByteArrayLiteral(" (");
result += errorMessageFromComError(error);
result += ')';
-#endif // !Q_OS_WINCE
+#endif // !defined(Q_OS_WINCE) && (!defined(USE___UUIDOF) || (defined(USE___UUIDOF) && USE___UUIDOF == 1))
return result;
}

View file

@ -0,0 +1,37 @@
diff -dur old/qtbase/src/plugins/platforms/windows/qwindowscontext.h new/qtbase/src/plugins/platforms/windows/qwindowscontext.h
--- old/qtbase/src/plugins/platforms/windows/qwindowscontext.h 2015-06-29 22:04:40.000000000 +0200
+++ new/qtbase/src/plugins/platforms/windows/qwindowscontext.h 2015-11-01 12:55:59.751234846 +0100
@@ -124,10 +124,18 @@
inline void init();
typedef HRESULT (WINAPI *SHCreateItemFromParsingName)(PCWSTR, IBindCtx *, const GUID&, void **);
+#if defined(Q_CC_MINGW) && (!defined(__MINGW64_VERSION_MAJOR) || __MINGW64_VERSION_MAJOR < 3)
+ typedef HRESULT (WINAPI *SHGetKnownFolderIDList)(const GUID &, DWORD, HANDLE, ITEMIDLIST **);
+#else
typedef HRESULT (WINAPI *SHGetKnownFolderIDList)(const GUID &, DWORD, HANDLE, PIDLIST_ABSOLUTE *);
+#endif
typedef HRESULT (WINAPI *SHGetStockIconInfo)(int , int , _SHSTOCKICONINFO *);
typedef HRESULT (WINAPI *SHGetImageList)(int, REFIID , void **);
+#if defined(Q_CC_MINGW) && (!defined(__MINGW64_VERSION_MAJOR) || __MINGW64_VERSION_MAJOR < 3)
+ typedef HRESULT (WINAPI *SHCreateItemFromIDList)(const ITEMIDLIST *, REFIID, void **);
+#else
typedef HRESULT (WINAPI *SHCreateItemFromIDList)(PCIDLIST_ABSOLUTE, REFIID, void **);
+#endif
SHCreateItemFromParsingName sHCreateItemFromParsingName;
SHGetKnownFolderIDList sHGetKnownFolderIDList;
diff -dur old/qtbase/src/plugins/platforms/windows/qwindowsdialoghelpers.cpp new/qtbase/src/plugins/platforms/windows/qwindowsdialoghelpers.cpp
--- old/qtbase/src/plugins/platforms/windows/qwindowsdialoghelpers.cpp 2015-06-29 22:04:40.000000000 +0200
+++ new/qtbase/src/plugins/platforms/windows/qwindowsdialoghelpers.cpp 2015-11-01 13:41:09.503149772 +0100
@@ -1008,7 +1008,11 @@
qWarning() << __FUNCTION__ << ": Invalid CLSID: " << url.path();
return Q_NULLPTR;
}
+#if defined(Q_CC_MINGW) && (!defined(__MINGW64_VERSION_MAJOR) || __MINGW64_VERSION_MAJOR < 3)
+ ITEMIDLIST *idList;
+#else
PIDLIST_ABSOLUTE idList;
+#endif
HRESULT hr = QWindowsContext::shell32dll.sHGetKnownFolderIDList(uuid, 0, 0, &idList);
if (FAILED(hr)) {
qErrnoWarning("%s: SHGetKnownFolderIDList(%s)) failed", __FUNCTION__, qPrintable(url.toString()));

View file

@ -1,26 +0,0 @@
--- old/qtbase/src/gui/configure.pri 2018-06-06 17:28:10.000000000 -0400
+++ new/qtbase/src/gui/configure.pri 2018-08-17 18:43:01.589384567 -0400
@@ -43,18 +43,11 @@
}
defineTest(qtConfTest_xkbConfigRoot) {
- qtConfTest_getPkgConfigVariable($${1}): return(true)
-
- for (dir, $$list("/usr/share/X11/xkb", "/usr/local/share/X11/xkb")) {
- exists($$dir) {
- $${1}.value = $$dir
- export($${1}.value)
- $${1}.cache += value
- export($${1}.cache)
- return(true)
- }
- }
- return(false)
+ $${1}.value = "/usr/share/X11/xkb"
+ export($${1}.value)
+ $${1}.cache += value
+ export($${1}.cache)
+ return(true)
}
defineTest(qtConfTest_qpaDefaultPlatform) {

View file

@ -0,0 +1,10 @@
--- old/config.tests/unix/stl/stltest.cpp 2011-06-23 03:45:23.000000000 -0400
+++ new/config.tests/unix/stl/stltest.cpp 2014-08-28 00:54:04.154837604 -0400
@@ -49,6 +49,7 @@
#include <vector>
#include <algorithm>
#include <iostream>
+#include <cstddef>
// something mean to see if the compiler and C++ standard lib are good enough
template<class K, class T>

View file

@ -1,30 +0,0 @@
From 1a159c128c69a42d90819375c06a39994f3fbfc1 Mon Sep 17 00:00:00 2001
From: Cory Fields <cory-nospam-@coryfields.com>
Date: Tue, 28 Nov 2017 20:33:25 -0500
Subject: [PATCH] fix build with older mingw64
---
src/windows.hpp | 7 +++++++
1 file changed, 7 insertions(+)
diff --git a/src/windows.hpp b/src/windows.hpp
index 99e889d..e69038e 100644
--- a/src/windows.hpp
+++ b/src/windows.hpp
@@ -55,6 +55,13 @@
#include <winsock2.h>
#include <windows.h>
#include <mswsock.h>
+
+#if defined __MINGW64_VERSION_MAJOR && __MINGW64_VERSION_MAJOR < 4
+// Workaround for mingw-w64 < v4.0 which did not include ws2ipdef.h in iphlpapi.h.
+// Fixed in mingw-w64 by 9bd8fe9148924840d315b4c915dd099955ea89d1.
+#include <ws2def.h>
+#include <ws2ipdef.h>
+#endif
#include <iphlpapi.h>
#if !defined __MINGW32__
--
2.7.4

View file

@ -1,35 +0,0 @@
From 6e6b47d5ab381c3df3b30bb0b0a6cf210dfb1eba Mon Sep 17 00:00:00 2001
From: Cory Fields <cory-nospam-@coryfields.com>
Date: Mon, 5 Mar 2018 14:22:05 -0500
Subject: [PATCH] disable pthread_set_name_np
pthread_set_name_np adds a Glibc requirement on >= 2.12.
---
src/thread.cpp | 4 +++-
1 file changed, 3 insertions(+), 1 deletion(-)
diff --git a/src/thread.cpp b/src/thread.cpp
index 4fc59c3e..c3fdfd46 100644
--- a/src/thread.cpp
+++ b/src/thread.cpp
@@ -220,7 +220,7 @@ void zmq::thread_t::setThreadName(const char *name_)
*/
if (!name_)
return;
-
+#if 0
#if defined(ZMQ_HAVE_PTHREAD_SETNAME_1)
int rc = pthread_setname_np(name_);
if(rc) return;
@@ -233,6 +233,8 @@ void zmq::thread_t::setThreadName(const char *name_)
#elif defined(ZMQ_HAVE_PTHREAD_SET_NAME)
pthread_set_name_np(descriptor, name_);
#endif
+#endif
+ return;
}
#endif
--
2.11.1

View file

@ -1,10 +0,0 @@
.git
.gitignore
.travis.yml
README.md
LICENSE
hooks
Dockerfile
Makefile
*.sh
*.patch

View file

@ -1,43 +0,0 @@
FROM ubuntu:16.04
ENV LANG C.UTF-8
RUN set -xe; \
apt-get update; \
apt-get install --no-install-recommends -y build-essential libtool autotools-dev automake pkg-config git wget apt-utils \
librsvg2-bin libtiff-tools cmake imagemagick libcap-dev libz-dev libbz2-dev python-setuptools xz-utils ccache g++-multilib \
g++-mingw-w64-i686 mingw-w64-i686-dev bsdmainutils curl ca-certificates g++-mingw-w64-x86-64 mingw-w64-x86-64-dev; \
rm -rf /var/lib/apt/lists/*;
RUN wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key | apt-key add -; \
echo 'deb http://apt.llvm.org/xenial/ llvm-toolchain-xenial-8 main' >> /etc/apt/sources.list; \
apt-get update; \
apt-get install --no-install-recommends -y clang-8 lldb-8 lld-8 libc++-8-dev; \
rm -rf /var/lib/apt/lists/*;
RUN update-alternatives --install /usr/bin/clang++ clang++ /usr/bin/clang-cpp-8 80; \
update-alternatives --install /usr/bin/clang clang /usr/bin/clang-8 80; \
update-alternatives --install /usr/bin/c++ c++ /usr/bin/clang++ 80; \
update-alternatives --install /usr/bin/cc cc /usr/bin/clang 80; \
update-alternatives --set x86_64-w64-mingw32-g++ /usr/bin/x86_64-w64-mingw32-g++-posix; \
update-alternatives --set i686-w64-mingw32-g++ /usr/bin/i686-w64-mingw32-g++-posix; \
/usr/sbin/update-ccache-symlinks; \
cd /usr/include/c++ && ln -s /usr/lib/llvm-8/include/c++/v1; \
cd /usr/lib/llvm-8/lib && ln -s libc++abi.so.1 libc++abi.so;
ARG VCS_REF
ARG BUILD_DATE
LABEL maintainer="blockchain@lbry.com" \
description="build_lbrycrd" \
version="1.1" \
org.label-schema.name="build_lbrycrd" \
org.label-schema.description="Use this to generate a reproducible build of LBRYcrd" \
org.label-schema.build-date=$BUILD_DATE \
org.label-schema.vcs-ref=$VCS_REF \
org.label-schema.vcs-url="https://github.com/lbryio/lbrycrd" \
org.label-schema.schema-version="1.0.0-rc1" \
org.label-schema.vendor="LBRY" \
org.label-schema.docker.cmd="docker build --build-arg BUILD_DATE=`date -u +"%Y-%m-%dT%H:%M:%SZ"` --build-arg VCS_REF=`git rev-parse --short HEAD` -t lbry/build_lbrycrd packaging"
ENV PATH "/usr/lib/ccache:$PATH"
WORKDIR /home
CMD ["/bin/bash"]

View file

@ -1,47 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail
# NOTE: this requires that you get the MacOS SDK separately.
# To acquire it, you will need to log into the Apple dev portal.
# From there, you download an Xcode package. Recommended: 7.3.1
# You can extract the SDK from that using contrib/macdeploy/extract
# You will need a folder like this: depends/SDKs/MacOSX10.11.sdk
# and ensure that the version in the darwin.mk file corresponds to the SDK.
if which dpkg-query >/dev/null; then
if dpkg-query -W librsvg2-bin libtiff-tools cmake imagemagick libcap-dev libz-dev libbz2-dev python-setuptools \
build-essential libtool autotools-dev automake pkg-config bsdmainutils curl ca-certificates; then
echo "All dependencies satisfied."
else
echo "Missing dependencies detected. Exiting..."
exit 1
fi
fi
if [ ! -e depends/SDKs/MacOSX10.11.sdk ]; then
echo "Missing depends/SDKs/MacOSX10.11.sdk"
exit 1
fi
if which ccache >/dev/null; then
echo "ccache config:"
ccache -ps
fi
pushd depends
make -j$(getconf _NPROCESSORS_ONLN) HOST=x86_64-apple-darwin14 NO_QT=1 V=1
popd
./autogen.sh
DEPS_DIR=$(pwd)/depends/x86_64-apple-darwin14
CONFIG_SITE=${DEPS_DIR}/share/config.site ./configure --enable-reduce-exports --without-gui --with-icu="${DEPS_DIR}" --enable-static --disable-shared
make -j$(getconf _NPROCESSORS_ONLN)
${DEPS_DIR}/native/bin/x86_64-apple-darwin14-strip src/lbrycrdd src/lbrycrd-cli src/lbrycrd-tx
if which ccache >/dev/null; then
echo "ccache stats:"
ccache -s
fi
echo "OSX 64bit build is complete"

View file

@ -1,37 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail
if which dpkg-query >/dev/null; then
if dpkg-query -W libtool autotools-dev automake pkg-config bsdmainutils curl ca-certificates; then
echo "All dependencies satisfied."
else
echo "Missing dependencies detected. Exiting..."
exit 1
fi
fi
if which ccache >/dev/null; then
echo "ccache config:"
ccache -ps
fi
export CXXFLAGS="${CXXFLAGS:--frecord-gcc-switches}"
echo "CXXFLAGS set to $CXXFLAGS"
cd depends
make -j$(getconf _NPROCESSORS_ONLN) HOST=x86_64-pc-linux-gnu NO_QT=1 V=1
cd ..
./autogen.sh
DEPS_DIR=$(pwd)/depends/x86_64-pc-linux-gnu
CONFIG_SITE=${DEPS_DIR}/share/config.site ./configure --enable-static --disable-shared --with-pic --without-gui
make -j$(getconf _NPROCESSORS_ONLN)
strip src/lbrycrdd src/lbrycrd-cli src/lbrycrd-tx
if which ccache >/dev/null; then
echo "ccache stats:"
ccache -s
fi
echo "Linux 64bit build is complete"

64
packaging/build_windows.sh Executable file
View file

@ -0,0 +1,64 @@
#! /bin/bash
set -euo pipefail
sudo apt-get update
sudo apt-get install -y --no-install-recommends \
g++-mingw-w64-i686 mingw-w64-i686-dev g++-mingw-w64-x86-64 \
mingw-w64-x86-64-dev build-essential libtool autotools-dev automake pkg-config \
libssl-dev libevent-dev bsdmainutils curl ca-certificates
echo "1" | sudo update-alternatives --config x86_64-w64-mingw32-g++
echo "1" | sudo update-alternatives --config x86_64-w64-mingw32-gcc
#################################################################
# Build ICU for Linux first so that we can cross-compile it below.
# It's a quirk of ICU that it requires a working Linux build of
# itself to be used as part of the cross-compile.
#################################################################
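# In short (illustrative summary of the steps below): a native Linux ICU is
# built into $icu_linux_dir first, the depends system then cross-compiles ICU
# for x86_64-w64-mingw32, and ./configure is finally pointed at that
# cross-compiled prefix via --with-icu.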
icu_version=63.1
icu_release=icu4c-63_1-src.tgz
staging_dir=/tmp/icu_staging
icu_linux_dir=$staging_dir/build_icu_linux
mkdir -p $staging_dir
pushd $staging_dir
wget -c http://download.icu-project.org/files/icu4c/$icu_version/$icu_release
tar -xzf $icu_release
pushd icu/source
CC="gcc" CXX="g++" ./runConfigureICU Linux --prefix=$icu_linux_dir --enable-extras=no --enable-strict=no --enable-static --enable-shared=no --enable-tests=no --enable-samples=no --enable-dyload=no
make -j4
make install
popd
popd
export CXXFLAGS="-std=c++11"
pushd depends
# Remove the dir saying that dependencies are built (although ccache
# is still enabled).
rm -rf built
mkdir -p sources
cp "$staging_dir/$icu_release" sources/
# Build and install the cross compiled ICU package.
make -j4 HOST=x86_64-w64-mingw32 NO_QT=1 ICU_ONLY=1
# Then build the rest of the dependencies (now that it exists and we
# can determine the location for it).
icu_mingw_dir=$(find /tmp/icu_install -name x86_64-w64-mingw32 -type d)
make -j4 HOST=x86_64-w64-mingw32 NO_QT=1 ICU_DIR=$icu_mingw_dir V=1
popd
./autogen.sh
echo "Using --with-icu=$icu_mingw_dir"
PREFIX=`pwd`/depends/x86_64-w64-mingw32
CC="x86_64-w64-mingw32-gcc" CXX="x86_64-w64-mingw32-g++" ./configure --prefix=$PREFIX --host=x86_64-w64-mingw32 --build=x86_64-w64-mingw32 --without-gui --with-icu=$icu_mingw_dir --enable-static --disable-shared
./configure --prefix=$PREFIX --host=x86_64-w64-mingw32 --build=x86_64-w64-mingw32 --without-gui --with-icu=$icu_mingw_dir --enable-static --disable-shared
make -j4
rm -rf $staging_dir
# Remove hardcoded cross compiled ICU package path.
rm -rf /tmp/icu_install
echo "Windows build is complete"

View file

@ -1,38 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail
if which dpkg-query >/dev/null; then
if dpkg-query -W g++-mingw-w64-i686 mingw-w64-i686-dev \
build-essential libtool autotools-dev automake pkg-config \
bsdmainutils curl ca-certificates; then
echo "All dependencies satisfied."
else
echo "Missing dependencies detected. Exiting..."
exit 1
fi
# sudo update-alternatives --config i686-w64-mingw32-g++ # you have to select posix
fi
if which ccache >/dev/null; then
echo "ccache config:"
ccache -ps
fi
pushd depends
make -j$(getconf _NPROCESSORS_ONLN) HOST=i686-w64-mingw32 NO_QT=1 V=1
popd
./autogen.sh
DEPS_DIR=$(pwd)/depends/i686-w64-mingw32
CONFIG_SITE=${DEPS_DIR}/share/config.site ./configure --prefix=/ --without-gui --with-icu="$DEPS_DIR" --enable-static --disable-shared
make -j$(getconf _NPROCESSORS_ONLN)
i686-w64-mingw32-strip src/lbrycrdd.exe src/lbrycrd-cli.exe src/lbrycrd-tx.exe
if which ccache >/dev/null; then
echo "ccache stats:"
ccache -s
fi
echo "Windows 32bit build is complete"

View file

@ -1,37 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail
if which dpkg-query >/dev/null; then
if dpkg-query -W g++-mingw-w64-x86-64 mingw-w64-x86-64-dev \
build-essential libtool autotools-dev automake pkg-config \
bsdmainutils curl ca-certificates; then
echo "All dependencies satisfied."
else
echo "Missing dependencies detected. Exiting..."
exit 1
fi
#sudo update-alternatives --config x86_64-w64-mingw32-g++ # you have to select posix
fi
if which ccache >/dev/null; then
echo "ccache config:"
ccache -ps
fi
pushd depends
make -j$(getconf _NPROCESSORS_ONLN) HOST=x86_64-w64-mingw32 NO_QT=1 V=1
popd
./autogen.sh
DEPS_DIR=$(pwd)/depends/x86_64-w64-mingw32
CONFIG_SITE=${DEPS_DIR}/share/config.site ./configure --prefix=/ --without-gui --with-icu="$DEPS_DIR" --enable-static --disable-shared
make -j$(getconf _NPROCESSORS_ONLN)
x86_64-w64-mingw32-strip src/lbrycrdd.exe src/lbrycrd-cli.exe src/lbrycrd-tx.exe
if which ccache >/dev/null; then
echo "ccache stats:"
ccache -s
fi
echo "Windows 64bit build is complete"

View file

@ -0,0 +1,23 @@
diff --git a/src/Makefile.am b/src/Makefile.am
index a9f0a94..d71bfeb 100644
--- a/src/Makefile.am
+++ b/src/Makefile.am
@@ -64,12 +64,12 @@ if ENABLE_ZMQ
EXTRA_LIBRARIES += libbitcoin_zmq.a
endif
-if BUILD_BITCOIN_LIBS
-lib_LTLIBRARIES = libbitcoinconsensus.la
-LIBBITCOINCONSENSUS=libbitcoinconsensus.la
-else
-LIBBITCOINCONSENSUS=
-endif
+# if BUILD_BITCOIN_LIBS
+# lib_LTLIBRARIES = libbitcoinconsensus.la
+# LIBBITCOINCONSENSUS=libbitcoinconsensus.la
+# else
+# LIBBITCOINCONSENSUS=
+# endif
bin_PROGRAMS =
TESTS =

451
reproducible_build.sh Executable file
View file

@ -0,0 +1,451 @@
#!/bin/bash
set -euox pipefail
function HELP {
echo "Build lbrycrd"
echo "-----"
echo "When run without any arguments, this script expects the current directory"
echo "to be the lbrycrd repo and it builds what is in that directory"
echo
echo "This is a long build process so it can be split into two parts"
echo "Specify the -d flag to build only the dependencies"
echo "and the -l flag to build only lbrycrd. This will fail"
echo "if the dependencies weren't built earlier"
echo
echo "Optional arguments:"
echo
echo "-f: check formatting of committed code relative to master"
echo "-r: remove intermediate files."
echo "-l: build only lbrycrd"
echo "-d: build only the dependencies"
echo "-o: timeout build after 40 minutes"
echo "-t: turn trace on"
echo "-h: show help"
exit 1
}
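# Illustrative invocations (flags as documented in HELP above):
#   ./reproducible_build.sh        # build the dependencies, then lbrycrd
#   ./reproducible_build.sh -d     # build only the dependencies
#   ./reproducible_build.sh -l     # build only lbrycrd (dependencies must already be built)
#   ./reproducible_build.sh -r -t  # also remove intermediate files and turn trace on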
CLEAN=false
CHECK_CODE_FORMAT=false
BUILD_DEPENDENCIES=true
BUILD_LBRYCRD=true
TIMEOUT=false
THREE_MB=3145728
# this flag gets set to false if
# the script exits due to a timeout
OUTPUT_LOG=true
while getopts :crfldoth:w:d: FLAG; do
case $FLAG in
r)
CLEAN=true
;;
f)
CHECK_CODE_FORMAT=true
;;
l)
BUILD_DEPENDENCIES=false
;;
d)
BUILD_LBRYCRD=false
;;
o)
TIMEOUT=true
;;
t)
set -o xtrace
;;
h)
HELP
;;
\?) #unrecognized option - show help
echo "Option -$OPTARG not allowed."
HELP
;;
:)
echo "Option -$OPTARG requires an argument."
HELP
;;
esac
done
shift $((OPTIND-1))
SUDO=''
if (( EUID != 0 )); then
SUDO='sudo'
fi
if [ "$(basename "$PWD")" != "lbrycrd" ]; then
echo "Not currently in the lbrycrd directory. Cowardly refusing to go forward"
exit 1
fi
SOURCE_DIR=$PWD
if [ -z "${TRAVIS_OS_NAME+x}" ]; then
if [ "$(uname -s)" = "Darwin" ]; then
OS_NAME="osx"
else
OS_NAME="linux"
fi
else
OS_NAME="${TRAVIS_OS_NAME}"
fi
if [ -z "${TRAVIS_BUILD_DIR+x}" ]; then
START_TIME_FILE="$PWD/start_time"
else
# if we are on travis (the primary use case for setting a timeout)
# this file is created when the build starts
START_TIME_FILE="$TRAVIS_BUILD_DIR/start_time"
fi
rm -f ${START_TIME_FILE}
date +%s > ${START_TIME_FILE}
NEXT_TIME=60
function exit_at_60() {
if [ -f "${START_TIME_FILE}" ]; then
NOW=$(date +%s)
START=$(cat "${START_TIME_FILE}")
TIMEOUT_SECS=3600 # 60 * 60
TIME=$((NOW - START))
if (( TIME > NEXT_TIME )); then
echo "Build has taken $((TIME / 60)) minutes: $1"
NEXT_TIME=$((TIME + 60))
fi
if [ "$TIMEOUT" = true ] && (( TIME > TIMEOUT_SECS )); then
echo 'Exiting at 60 minutes to allow the cache to populate'
OUTPUT_LOG=false
exit 1
fi
fi
}
# two arguments
# - pid (probably from $!)
# - echo message
function wait_and_echo() {
PID=$1
TIME=0
SLEEP=3
# loop until the process is no longer running
# check every $SLEEP seconds, echoing a message every minute
while (ps -p "${PID}" > /dev/null); do
exit_at_60 "$2"
sleep "${SLEEP}"
done
}
# run a command ($1) in the background
# logging its stdout and stderr to $2
# and wait for it to complete
function background() {
eval $1 >> "$2" 2>&1 &
BACKGROUND_PID=$!
(
set +xe # do not echo each sleep call in trace mode
wait_and_echo $BACKGROUND_PID "$3"
)
wait $BACKGROUND_PID
}
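# Usage sketch (mirrors the real calls further down), e.g.:
#   background "make ${PARALLEL}" "${BDB_LOG}" "Waiting for bdb to finish building"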
function cleanup() {
rv=$?
if [ $rv -eq 0 ]; then
return $rv
fi
# cat the log file if it exists
if [ -f "$2" ] && [ "${OUTPUT_LOG}" = true ]; then
echo
echo "Output of log file $2"
echo
cat "$2"
# tail -n 200 "$2"
echo
fi
# delete the build directory
rm -rf "$1"
echo "Build failed. Removing $1"
exit $rv
}
function cat_and_exit() {
rv=$?
# cat the log file if it exists
if [ -f "$1" ] && [ "${OUTPUT_LOG}" = true ]; then
echo
echo "Output of log file $1"
echo
# This used to be the last 3MB, but outputting that
# caused problems on travis.
# Hopefully the last 1000 lines are enough
# to debug whatever went wrong
tail -n 1000 "$1"
echo
fi
exit $rv
}
function brew_if_not_installed() {
if ! brew ls | grep $1 --quiet; then
brew install $1
fi
}
function install_brew_packages() {
brew update > /dev/null
brew unlink python
brew_if_not_installed autoconf
brew_if_not_installed automake
# something weird happened where glibtoolize was failing to find
# sed, and reinstalling fixes it.
brew reinstall libtool
brew_if_not_installed pkg-config
brew_if_not_installed protobuf
brew_if_not_installed gmp
if [ "${CHECK_CODE_FORMAT}" = true ]; then
brew_if_not_installed clang-format
fi
}
function install_apt_packages() {
if [ -d "${OUTPUT_DIR}" ]; then
return 0
fi
if [ -z "${TRAVIS+x}" ]; then
# if not on travis, it's nice to see progress
QUIET=""
else
QUIET="-qq"
fi
# get the required OS packages
$SUDO apt-get ${QUIET} update
$SUDO apt-get ${QUIET} install -y --no-install-recommends \
build-essential python-dev libbz2-dev libtool \
autotools-dev autoconf git pkg-config wget \
ca-certificates automake bsdmainutils
if [ "${CHECK_CODE_FORMAT}" = true ]; then
$SUDO apt-get ${QUIET} install -y --no-install-recommends \
clang-format-3.9
fi
}
function build_dependencies() {
if [ "${OS_NAME}" = "osx" ]; then
PARALLEL="-j $(sysctl -n hw.ncpu)"
install_brew_packages
else
PARALLEL="-j $(grep -c processor /proc/cpuinfo)"
install_apt_packages
fi
if [ "$CLEAN" = true ]; then
rm -rf "${LBRYCRD_DEPENDENCIES}"
rm -rf "${OUTPUT_DIR}"
fi
mkdir -p "${LBRYCRD_DEPENDENCIES}"
# Download required dependencies (if not already present)
pushd ${LBRYCRD_DEPENDENCIES} > /dev/null
if [ ! -f db-4.8.30.NC.zip ]; then
wget http://download.oracle.com/berkeley-db/db-4.8.30.NC.zip
unzip -o -q db-4.8.30.NC.zip
fi
if [ ! -f libevent-2.1.8-stable.tar.gz ]; then
wget https://github.com/libevent/libevent/releases/download/release-2.1.8-stable/libevent-2.1.8-stable.tar.gz
tar -xzf libevent-2.1.8-stable.tar.gz
fi
if [ ! -f openssl-1.0.2r.tar.gz ]; then
wget https://www.openssl.org/source/openssl-1.0.2r.tar.gz
tar -xzf openssl-1.0.2r.tar.gz
fi
if [ ! -f icu4c-63_1-src.tgz ]; then
wget http://download.icu-project.org/files/icu4c/63.1/icu4c-63_1-src.tgz
tar -xzf icu4c-63_1-src.tgz
fi
if [ ! -f boost_1_64_0.tar.bz2 ]; then
wget https://dl.bintray.com/boostorg/release/1.64.0/source/boost_1_64_0.tar.bz2
tar -xjf boost_1_64_0.tar.bz2
fi
mkdir -p "${LOG_DIR}"
build_dependency "${OPENSSL_PREFIX}" "${LOG_DIR}/openssl_build.log" build_openssl
build_dependency "${ICU_PREFIX}" "${LOG_DIR}/icu_build.log" build_icu
build_dependency "${BDB_PREFIX}" "${LOG_DIR}/bdb_build.log" build_bdb
set +u
export PKG_CONFIG_PATH="${PKG_CONFIG_PATH}:${OPENSSL_PREFIX}/lib/pkgconfig"
set -u
build_dependency "${BOOST_PREFIX}" "${LOG_DIR}/boost_build.log" build_boost
build_dependency "${LIBEVENT_PREFIX}" "${LOG_DIR}/libevent_build.log" build_libevent
}
function build_bdb() {
BDB_LOG="$1"
if [ "${OS_NAME}" = "osx" ]; then
# TODO: make this handle already patched files
patch db-4.8.30.NC/dbinc/atomic.h < ../contrib/patches/atomic.patch
fi
cd db-4.8.30.NC/build_unix
echo "Building bdb. tail -f $BDB_LOG to see the details and monitor progress"
../dist/configure --prefix="${BDB_PREFIX}" --enable-cxx --disable-shared --with-pic > "${BDB_LOG}"
background "make ${PARALLEL}" "${BDB_LOG}" "Waiting for bdb to finish building"
make install >> "${BDB_LOG}" 2>&1
}
function build_openssl() {
OPENSSL_LOG="$1"
mkdir -p "${OPENSSL_PREFIX}/ssl"
cd openssl-1.0.2r
echo "Building openssl. tail -f $OPENSSL_LOG to see the details and monitor progress"
if [ "${OS_NAME}" = "osx" ]; then
./Configure --prefix="${OPENSSL_PREFIX}" --openssldir="${OPENSSL_PREFIX}/ssl" \
-fPIC darwin64-x86_64-cc \
no-shared no-dso no-engines > "${OPENSSL_LOG}"
make depend
else
[[ $(uname -m) = 'i686' ]] && OS_ARCH="linux-generic32" || OS_ARCH="linux-x86_64"
./Configure --prefix="${OPENSSL_PREFIX}" --openssldir="${OPENSSL_PREFIX}/ssl" \
${OS_ARCH} -fPIC -static no-shared no-dso > "${OPENSSL_LOG}"
fi
background "make ${PARALLEL}" "${OPENSSL_LOG}" "Waiting for openssl to finish building"
make install >> "${OPENSSL_LOG}" 2>&1
}
function build_boost() {
BOOST_LOG="$1"
cd boost_1_64_0
echo "int main() { return 0; }" > libs/regex/build/has_icu_test.cpp
echo "int main() { return 0; }" > libs/locale/build/has_icu_test.cpp
export BOOST_ICU_LIBS="-L${ICU_PREFIX}/lib -licui18n -licuuc -licudata -ldl"
export BOOST_LIBRARIES="chrono,filesystem,program_options,system,locale,regex,thread,test"
echo "Building Boost. tail -f ${BOOST_LOG} to see the details and monitor progress"
./bootstrap.sh --prefix="${BOOST_PREFIX}" --with-icu="${ICU_PREFIX}" --with-libraries=${BOOST_LIBRARIES} > "${BOOST_LOG}" 2>&1
b2cmd="./b2 --reconfigure ${PARALLEL} link=static cxxflags=\"-std=c++11 -fPIC\" install boost.locale.iconv=off boost.locale.posix=off -sICU_PATH=\"${ICU_PREFIX}\" -sICU_LINK=\"${BOOST_ICU_LIBS}\""
background "${b2cmd}" "${BOOST_LOG}" "Waiting for boost to finish building"
}
function build_icu() {
ICU_LOG="$1"
mkdir -p "${ICU_PREFIX}/icu"
pushd icu/source > /dev/null
echo "Building icu. tail -f $ICU_LOG to see the details and monitor progress"
./configure --prefix="${ICU_PREFIX}" --enable-draft --enable-tools \
--disable-shared --enable-static --disable-extras --disable-icuio --disable-dyload \
--disable-layout --disable-layoutex --disable-tests --disable-samples CFLAGS=-fPIC CPPFLAGS=-fPIC > "${ICU_LOG}"
if [ ! -z ${TARGET+x} ]; then
TMP_TARGET="${TARGET}"
unset TARGET
fi
set +e
background "make ${PARALLEL} VERBOSE=1" "${ICU_LOG}" "Waiting for icu to finish building"
make install >> "${ICU_LOG}" 2>&1
if [ ! -z ${TARGET+x} ]; then
TARGET="${TMP_TARGET}"
fi
set -e
popd > /dev/null
}
function build_libevent() {
LIBEVENT_LOG="$1"
cd libevent-2.1.8-stable
echo "Building libevent. tail -f ${LIBEVENT_LOG} to see the details and monitor progress"
./autogen.sh > "${LIBEVENT_LOG}" 2>&1
./configure --prefix="${LIBEVENT_PREFIX}" --enable-static --disable-shared --with-pic \
LDFLAGS="-L${OPENSSL_PREFIX}/lib" \
CPPFLAGS="-I${OPENSSL_PREFIX}/include" >> "${LIBEVENT_LOG}" 2>&1
background "make ${PARALLEL}" "${LIBEVENT_LOG}" "Waiting for libevent to finish building"
make install >> "${LIBEVENT_LOG}"
}
function build_dependency() {
pushd .
PREFIX=$1
LOG=$2
BUILD=$3
cd "${LBRYCRD_DEPENDENCIES}"
mkdir -p "${PREFIX}"
trap 'cleanup "${PREFIX}" "${LOG}"' INT TERM EXIT
"${BUILD}" "${LOG}"
trap - INT TERM EXIT
popd
}
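# Invoked from build_dependencies above, e.g.:
#   build_dependency "${OPENSSL_PREFIX}" "${LOG_DIR}/openssl_build.log" build_openssl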
function build_lbrycrd() {
cd "${SOURCE_DIR}"
./autogen.sh > "${LBRYCRD_LOG}" 2>&1
LDFLAGS="-L${OPENSSL_PREFIX}/lib -L${BDB_PREFIX}/lib -L${LIBEVENT_PREFIX}/lib"
OPTIONS="--enable-cxx --enable-static --disable-shared --with-pic"
if [ "${OS_NAME}" = "osx" ]; then
CPPFLAGS="-I${OPENSSL_PREFIX}/include -I${BDB_PREFIX}/include -I${LIBEVENT_PREFIX}/include -I${ICU_PREFIX}/include"
else
CPPFLAGS="-I${OPENSSL_PREFIX}/include -I${BDB_PREFIX}/include -I${LIBEVENT_PREFIX}/include -I${ICU_PREFIX}/include -Wno-unused-local-typedefs -Wno-deprecated -Wno-implicit-fallthrough"
fi
CPPFLAGS="${CPPFLAGS}" LDFLAGS="${LDFLAGS}" \
./configure --without-gui ${OPTIONS} \
--with-boost="${BOOST_PREFIX}" \
--with-icu="${ICU_PREFIX}" >> "${LBRYCRD_LOG}" 2>&1
background "make ${PARALLEL}" "${LBRYCRD_LOG}" "Waiting for lbrycrd to finish building"
}
function clang_format_diff(){
# run a code formatting check on any commits not in master
# requires clang-format
git diff -U0 origin/master -- '*.h' '*.cpp' | ./contrib/devtools/clang-format-diff.py -p1
}
# these variables are needed in both functions
LBRYCRD_DEPENDENCIES="$(pwd)/lbrycrd-dependencies"
OUTPUT_DIR="$(pwd)/build"
LOG_DIR="$(pwd)/logs"
ICU_PREFIX="${OUTPUT_DIR}/icu"
BDB_PREFIX="${OUTPUT_DIR}/bdb"
OPENSSL_PREFIX="${OUTPUT_DIR}/openssl"
BOOST_PREFIX="${OUTPUT_DIR}/boost"
LIBEVENT_PREFIX="${OUTPUT_DIR}/libevent"
if [ "${BUILD_DEPENDENCIES}" = true ]; then
build_dependencies
fi
if [ "${CHECK_CODE_FORMAT}" = true ]; then
LINES_W_FORMAT_REQUIRED=$(clang_format_diff | wc -l)
if [ ${LINES_W_FORMAT_REQUIRED} -ne 0 ]; then
echo "Failed to pass clang format diff: See below for the diff"
clang_format_diff
exit 1
fi
fi
set +u
export PKG_CONFIG_PATH="${PKG_CONFIG_PATH}:${OPENSSL_PREFIX}/lib/pkgconfig:${LIBEVENT_PREFIX}/lib/pkgconfig:${ICU_PREFIX}/lib/pkgconfig"
set -u
if [ "${BUILD_LBRYCRD}" = true ]; then
LBRYCRD_LOG="${LOG_DIR}/lbrycrd_build.log"
echo "Building lbrycrd. tail -f ${LBRYCRD_LOG} to see the details and monitor progress"
trap 'cat_and_exit "${LBRYCRD_LOG}"' INT TERM EXIT
build_lbrycrd
trap - INT TERM EXIT
./src/test/test_lbrycrd
set +u
if [[ ! $CXXFLAGS =~ -g ]]; then
strip src/lbrycrdd
strip src/lbrycrd-cli
strip src/lbrycrd-tx
fi
fi

View file

@ -19,7 +19,7 @@ else
LIBUNIVALUE = $(UNIVALUE_LIBS)
endif
BITCOIN_INCLUDES=-I$(builddir) $(BDB_CPPFLAGS) $(ICU_CPPFLAGS) $(BOOST_CPPFLAGS) $(LEVELDB_CPPFLAGS) $(CRYPTO_CFLAGS) $(ICU_CFLAGS)
BITCOIN_INCLUDES=-I$(builddir) $(BDB_CPPFLAGS) $(ICU_CPPFLAGS) $(BOOST_CPPFLAGS) $(LEVELDB_CPPFLAGS) $(CRYPTO_CFLAGS)
BITCOIN_INCLUDES += -I$(srcdir)/secp256k1/include
BITCOIN_INCLUDES += $(UNIVALUE_CFLAGS)
@ -149,13 +149,11 @@ BITCOIN_CORE_H = \
policy/policy.h \
policy/rbf.h \
pow.h \
prefixtrie.h \
protocol.h \
random.h \
reverse_iterator.h \
reverselock.h \
rpc/blockchain.h \
rpc/claimrpchelp.h \
rpc/client.h \
rpc/mining.h \
rpc/protocol.h \
@ -219,7 +217,7 @@ obj/build.h: FORCE
"$(abs_top_srcdir)"
libbitcoin_util_a-clientversion.$(OBJEXT): obj/build.h
# server: shared between lbrycrdd and lbrycrd-qt
# server: shared between bitcoind and bitcoin-qt
libbitcoin_server_a_CPPFLAGS = $(AM_CPPFLAGS) $(BITCOIN_INCLUDES) $(MINIUPNPC_CPPFLAGS) $(EVENT_CFLAGS) $(EVENT_PTHREADS_CFLAGS)
libbitcoin_server_a_CXXFLAGS = $(AM_CXXFLAGS) $(PIE_FLAGS)
libbitcoin_server_a_SOURCES = \
@ -251,7 +249,6 @@ libbitcoin_server_a_SOURCES = \
policy/policy.cpp \
policy/rbf.cpp \
pow.cpp \
prefixtrie.cpp \
rest.cpp \
rpc/blockchain.cpp \
rpc/claimtrie.cpp \
@ -286,7 +283,7 @@ libbitcoin_zmq_a_SOURCES = \
endif
# wallet: shared between lbrycrdd and lbrycrd-qt, but only linked
# wallet: shared between bitcoind and bitcoin-qt, but only linked
# when wallet enabled
libbitcoin_wallet_a_CPPFLAGS = $(AM_CPPFLAGS) $(BITCOIN_INCLUDES)
libbitcoin_wallet_a_CXXFLAGS = $(AM_CXXFLAGS) $(PIE_FLAGS)
@ -386,7 +383,7 @@ libbitcoin_consensus_a_SOURCES = \
utilstrencodings.h \
version.h
# common: shared between lbrycrdd, and lbrycrd-qt and non-server tools
# common: shared between bitcoind, and bitcoin-qt and non-server tools
libbitcoin_common_a_CPPFLAGS = $(AM_CPPFLAGS) $(BITCOIN_INCLUDES)
libbitcoin_common_a_CXXFLAGS = $(AM_CXXFLAGS) $(PIE_FLAGS)
libbitcoin_common_a_SOURCES = \
@ -446,7 +443,7 @@ libbitcoin_util_a_SOURCES += compat/glibc_compat.cpp
AM_LDFLAGS += $(COMPAT_LDFLAGS)
endif
# cli: shared between lbrycrd-cli and lbrycrd-qt
# cli: shared between lbrycrd-cli and bitcoin-qt
libbitcoin_cli_a_CPPFLAGS = $(AM_CPPFLAGS) $(BITCOIN_INCLUDES)
libbitcoin_cli_a_CXXFLAGS = $(AM_CXXFLAGS) $(PIE_FLAGS)
libbitcoin_cli_a_SOURCES = \

View file

@ -32,7 +32,7 @@ bench_bench_bitcoin_SOURCES = \
nodist_bench_bench_bitcoin_SOURCES = $(GENERATED_BENCH_FILES)
bench_bench_bitcoin_CPPFLAGS = $(AM_CPPFLAGS) $(BITCOIN_INCLUDES) $(EVENT_CLFAGS) $(EVENT_PTHREADS_CFLAGS) $(BOOST_CPPFLAGS) -I$(builddir)/bench/
bench_bench_bitcoin_CPPFLAGS = $(AM_CPPFLAGS) $(BITCOIN_INCLUDES) $(EVENT_CLFAGS) $(EVENT_PTHREADS_CFLAGS) -I$(builddir)/bench/
bench_bench_bitcoin_CXXFLAGS = $(AM_CXXFLAGS) $(PIE_FLAGS)
bench_bench_bitcoin_LDADD = \
$(LIBBITCOIN_WALLET) \

View file

@ -45,7 +45,6 @@ BITCOIN_TESTS =\
test/bswap_tests.cpp \
test/checkqueue_tests.cpp \
test/coins_tests.cpp \
test/compilerbug_tests.cpp \
test/compress_tests.cpp \
test/crypto_tests.cpp \
test/cuckoocache_tests.cpp \
@ -66,17 +65,11 @@ BITCOIN_TESTS =\
test/net_tests.cpp \
test/claimtriecache_tests.cpp \
test/claimtriebranching_tests.cpp \
test/claimtrieexpirationfork_tests.cpp \
test/claimtriefixture.cpp \
test/claimtriehashfork_tests.cpp \
test/claimtrienormalization_tests.cpp \
test/claimtrierpc_tests.cpp \
test/nameclaim_tests.cpp \
test/netbase_tests.cpp \
test/pmt_tests.cpp \
test/policyestimator_tests.cpp \
test/pow_tests.cpp \
test/prefixtrie_tests.cpp \
test/prevector_tests.cpp \
test/raii_event_tests.cpp \
test/random_tests.cpp \
@ -157,6 +150,7 @@ test_test_lbrycrd_fuzzy_LDADD = \
$(LIBSECP256K1)
test_test_lbrycrd_fuzzy_LDADD += $(BOOST_LIBS) $(CRYPTO_LIBS) $(ICU_LIBS)
#
nodist_test_test_lbrycrd_SOURCES = $(GENERATED_TEST_FILES)


@ -98,7 +98,7 @@ bool DeserializeFileDB(const fs::path& path, Data& data)
FILE *file = fsbridge::fopen(path, "rb");
CAutoFile filein(file, SER_DISK, CLIENT_VERSION);
if (filein.IsNull())
return false;
return error("%s: Failed to open file %s", __func__, path.string());
return DeserializeDB(filein, data);
}
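The only change in this hunk is swapping the silent return false for return error(...), so a missing or unreadable file leaves a trace in the log while the caller still sees a failure. A minimal sketch of what that call amounts to, assuming the stock error() helper from util.h (illustrative, not part of the diff):

    // roughly equivalent to: return error("%s: Failed to open file %s", __func__, path.string());
    if (filein.IsNull()) {
        LogPrintf("ERROR: %s: Failed to open file %s\n", __func__, path.string());
        return false;
    }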


@ -3,7 +3,6 @@
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
#include <bloom.h>
#include <nameclaim.h>
#include <primitives/transaction.h>
#include <hash.h>


@ -123,7 +123,7 @@ void CChainParams::UpdateVersionBitsParameters(Consensus::DeploymentPos d, int64
class CMainParams : public CChainParams {
public:
CMainParams() {
strNetworkID = CBaseChainParams::MAIN;
strNetworkID = "lbrycrd";
consensus.nSubsidyLevelInterval = 1<<5;
consensus.nMajorityEnforceBlockUpgrade = 750;
consensus.nMajorityRejectBlockOutdated = 950;
@ -132,8 +132,8 @@ public:
consensus.BIP34Height = 1;
consensus.BIP34Hash = uint256S("0xdecb9e2cca03a419fd9cca0cb2b1d5ad11b088f22f8f38556d93ac4358b86c24");
// FIXME: adjust heights
consensus.BIP65Height = 200000;
consensus.BIP66Height = 200000;
consensus.BIP65Height = 600000;
consensus.BIP66Height = 600000;
consensus.powLimit = uint256S("0000ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff");
consensus.nPowTargetTimespan = 150; //retarget every block
consensus.nPowTargetSpacing = 150;
@ -143,14 +143,10 @@ public:
consensus.nAllowMinDiffMinHeight = -1;
consensus.nAllowMinDiffMaxHeight = -1;
consensus.nNormalizedNameForkHeight = 539940; // targeting 21 March 2019
consensus.nMinTakeoverWorkaroundHeight = 496850;
consensus.nMaxTakeoverWorkaroundHeight = 658300; // targeting 30 Oct 2019
consensus.nWitnessForkHeight = 680770; // targeting 11 Dec 2019
consensus.nAllClaimsInMerkleForkHeight = 658310; // targeting 30 Oct 2019
consensus.fPowAllowMinDifficultyBlocks = false;
consensus.fPowNoRetargeting = false;
consensus.nRuleChangeActivationThreshold = 1916; // 95% of a half week
consensus.nMinerConfirmationWindow = 2016;
consensus.nRuleChangeActivationThreshold = 1916; // 95% of 2016
consensus.nMinerConfirmationWindow = 2016; // nPowTargetTimespan / nPowTargetSpacing
consensus.vDeployments[Consensus::DEPLOYMENT_TESTDUMMY].bit = 28;
consensus.vDeployments[Consensus::DEPLOYMENT_TESTDUMMY].nStartTime = 1199145601; // January 1, 2008
consensus.vDeployments[Consensus::DEPLOYMENT_TESTDUMMY].nTimeout = 1230767999; // December 31, 2008
@ -160,16 +156,17 @@ public:
consensus.vDeployments[Consensus::DEPLOYMENT_CSV].nStartTime = 1462060800; // May 1st, 2016
consensus.vDeployments[Consensus::DEPLOYMENT_CSV].nTimeout = 1493596800; // May 1st, 2017
// Deployment of SegWit (BIP141, BIP143, and BIP147) -- Unused (see nWitnessForkHeight).
// Deployment of SegWit (BIP141, BIP143, and BIP147)
consensus.vDeployments[Consensus::DEPLOYMENT_SEGWIT].bit = 1;
consensus.vDeployments[Consensus::DEPLOYMENT_SEGWIT].nStartTime = 1547942400; // Jan 20, 2019
consensus.vDeployments[Consensus::DEPLOYMENT_SEGWIT].nTimeout = 1548288000; // Jan 24, 2019
// The best chain should have at least this much work.
consensus.nMinimumChainWork = uint256S("000000000000000000000000000000000000000000000499ed6684d1bf6f6fd3"); //946000
consensus.nMinimumChainWork = uint256S("0x000000000000000000000000000000000000000000000000607ca7e806c4c1e9"); //400000
// By default assume that the signatures in ancestors of this block are valid.
consensus.defaultAssumeValid = uint256S("0d3b537afe49820e1c6efc555463f955251b1293c6e5130137e1e25744431172"); //946000
//consensus.defaultAssumeValid = uint256S("0xf0e56e70782af63ccb49c76e852540688755869ba59ec68cac9c04a6b4d9f5ca"); //400000
consensus.defaultAssumeValid = uint256S("0xa6bbb48f5343eb9b0287c22f3ea8b29f36cf10794a37f8a925a894d6f4519913"); //4000
/**
* The message start string is designed to be unlikely to occur in normal data.
@ -195,11 +192,9 @@ public:
vSeeds.clear();
vFixedSeeds.clear();
vSeeds.emplace_back("dnsseed1.lbry.io"); // LBRY Inc
vSeeds.emplace_back("dnsseed2.lbry.io"); // LBRY Inc
vSeeds.emplace_back("dnsseed3.lbry.io"); // LBRY Inc
vSeeds.emplace_back("seed.lbry.grin.io"); // Grin
vSeeds.emplace_back("seed.allaboutlbc.com"); // Madiator2011
vSeeds.emplace_back("dnsseed1.lbry.io"); // lbry.io
vSeeds.emplace_back("dnsseed2.lbry.io"); // lbry.io
vSeeds.emplace_back("dnsseed3.lbry.io"); // lbry.io
base58Prefixes[PUBKEY_ADDRESS] = std::vector<unsigned char>(1, 0x55);
base58Prefixes[SCRIPT_ADDRESS] = std::vector<unsigned char>(1, 0x7a);
@ -207,7 +202,9 @@ public:
base58Prefixes[EXT_PUBLIC_KEY] = {0x04, 0x88, 0xB2, 0x1E};
base58Prefixes[EXT_SECRET_KEY] = {0x04, 0x88, 0xAD, 0xE4};
bech32_hrp = "lbc";
vFixedSeeds = std::vector<SeedSpec6>(pnSeed6_main, pnSeed6_main + ARRAYLEN(pnSeed6_main));
bech32_hrp = "bc";
vFixedSeeds = std::vector<SeedSpec6>(pnSeed6_main, pnSeed6_main + ARRAYLEN(pnSeed6_main));
@ -241,7 +238,7 @@ public:
class CTestNetParams : public CChainParams {
public:
CTestNetParams() {
strNetworkID = CBaseChainParams::TESTNET;
strNetworkID = "lbrycrdtest";
consensus.nSubsidyLevelInterval = 1 << 5;
consensus.nMajorityEnforceBlockUpgrade = 51;
consensus.nMajorityRejectBlockOutdated = 75;
@ -261,10 +258,6 @@ public:
consensus.nAllowMinDiffMinHeight = 277299;
consensus.nAllowMinDiffMaxHeight = 1100000;
consensus.nNormalizedNameForkHeight = 993380; // targeting, 21 Feb 2019
consensus.nMinTakeoverWorkaroundHeight = 99;
consensus.nMaxTakeoverWorkaroundHeight = 1198550; // targeting 30 Sep 2019
consensus.nWitnessForkHeight = 1198600;
consensus.nAllClaimsInMerkleForkHeight = 1198560; // targeting 30 Sep 2019
consensus.fPowAllowMinDifficultyBlocks = true;
consensus.fPowNoRetargeting = false;
consensus.nRuleChangeActivationThreshold = 1512; // 75% for testchains
@ -278,7 +271,7 @@ public:
consensus.vDeployments[Consensus::DEPLOYMENT_CSV].nStartTime = 1456790400; // March 1st, 2016
consensus.vDeployments[Consensus::DEPLOYMENT_CSV].nTimeout = 1493596800; // May 1st, 2017
// Deployment of SegWit (BIP141, BIP143, and BIP147) -- Unused (see nWitnessForkHeight).
// Deployment of SegWit (BIP141, BIP143, and BIP147)
consensus.vDeployments[Consensus::DEPLOYMENT_SEGWIT].bit = 1;
consensus.vDeployments[Consensus::DEPLOYMENT_SEGWIT].nStartTime = 1462060800; // May 1st 2016
consensus.vDeployments[Consensus::DEPLOYMENT_SEGWIT].nTimeout = 1493596800; // May 1st 2017
@ -317,7 +310,7 @@ public:
base58Prefixes[EXT_PUBLIC_KEY] = {0x04, 0x35, 0x87, 0xCF};
base58Prefixes[EXT_SECRET_KEY] = {0x04, 0x35, 0x83, 0x94};
bech32_hrp = "tlbc";
bech32_hrp = "tb";
vFixedSeeds = std::vector<SeedSpec6>(pnSeed6_test, pnSeed6_test + ARRAYLEN(pnSeed6_test));
@ -351,7 +344,7 @@ public:
class CRegTestParams : public CChainParams {
public:
CRegTestParams() {
strNetworkID = CBaseChainParams::REGTEST;
strNetworkID = "lbrycrdreg";
consensus.nSubsidyLevelInterval = 1 << 5;
consensus.BIP16Exception = uint256();
consensus.BIP34Height = 1000; // BIP34 is needed for validation_block_tests
@ -368,10 +361,6 @@ public:
consensus.nAllowMinDiffMinHeight = -1;
consensus.nAllowMinDiffMaxHeight = -1;
consensus.nNormalizedNameForkHeight = 250; // SDK depends upon this number
consensus.nMinTakeoverWorkaroundHeight = -1;
consensus.nMaxTakeoverWorkaroundHeight = -1;
consensus.nWitnessForkHeight = 150;
consensus.nAllClaimsInMerkleForkHeight = 350;
consensus.fPowAllowMinDifficultyBlocks = false;
consensus.fPowNoRetargeting = false;
consensus.nRuleChangeActivationThreshold = 108; // 75% for testchains
@ -436,7 +425,7 @@ public:
base58Prefixes[EXT_PUBLIC_KEY] = {0x04, 0x35, 0x87, 0xCF};
base58Prefixes[EXT_SECRET_KEY] = {0x04, 0x35, 0x83, 0x94};
bech32_hrp = "rlbc";
bech32_hrp = "bcrt";
/* enable fallback fee on regtest */
m_fallback_fee_enabled = true;


@ -13,7 +13,7 @@
const std::string CBaseChainParams::MAIN = "lbrycrd";
const std::string CBaseChainParams::TESTNET = "lbrycrdtest";
const std::string CBaseChainParams::REGTEST = "lbrycrdreg";
const std::string CBaseChainParams::REGTEST = "regtest";
void SetupChainParamsBaseOptions()
{


@ -2,9 +2,8 @@
// Distributed under the MIT software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
#include <coins.h>
#include <claimscriptop.h>
#include <nameclaim.h>
#include "claimscriptop.h"
#include "nameclaim.h"
CClaimScriptAddOp::CClaimScriptAddOp(const COutPoint& point, CAmount nValue, int nHeight)
: point(point), nValue(nValue), nHeight(nHeight)
@ -38,37 +37,32 @@ CClaimScriptUndoAddOp::CClaimScriptUndoAddOp(const COutPoint& point, int nHeight
bool CClaimScriptUndoAddOp::claimName(CClaimTrieCache& trieCache, const std::string& name)
{
auto claimId = ClaimIdHash(point.hash, point.n);
LogPrint(BCLog::CLAIMS, "--- [%lu]: OP_CLAIM_NAME \"%s\" with claimId %s and tx prevout %s at index %d\n", nHeight, name, claimId.GetHex(), point.hash.ToString(), point.n);
LogPrintf("--- [%lu]: OP_CLAIM_NAME \"%s\" with claimId %s and tx prevout %s at index %d\n", nHeight, name, claimId.GetHex(), point.hash.ToString(), point.n);
return undoAddClaim(trieCache, name, claimId);
}
bool CClaimScriptUndoAddOp::updateClaim(CClaimTrieCache& trieCache, const std::string& name, const uint160& claimId)
{
LogPrint(BCLog::CLAIMS, "--- [%lu]: OP_UPDATE_CLAIM \"%s\" with claimId %s and tx prevout %s at index %d\n", nHeight, name, claimId.GetHex(), point.hash.ToString(), point.n);
LogPrintf("--- [%lu]: OP_UPDATE_CLAIM \"%s\" with claimId %s and tx prevout %s at index %d\n", nHeight, name, claimId.GetHex(), point.hash.ToString(), point.n);
return undoAddClaim(trieCache, name, claimId);
}
bool CClaimScriptUndoAddOp::undoAddClaim(CClaimTrieCache& trieCache, const std::string& name, const uint160& claimId)
{
LogPrint(BCLog::CLAIMS, "%s: (txid: %s, nOut: %d) Removing %s, claimId: %s, from the claim trie due to block disconnect\n", __func__, point.hash.ToString(), point.n, name, claimId.ToString());
LogPrintf("%s: (txid: %s, nOut: %d) Removing %s, claimId: %s, from the claim trie due to block disconnect\n", __func__, point.hash.ToString(), point.n, name, claimId.ToString());
bool res = trieCache.undoAddClaim(name, point, nHeight);
if (!res)
LogPrint(BCLog::CLAIMS, "%s: Removing claim fails\n", __func__);
LogPrintf("%s: Removing fails\n", __func__);
return res;
}
bool CClaimScriptUndoAddOp::supportClaim(CClaimTrieCache& trieCache, const std::string& name, const uint160& claimId)
{
if (LogAcceptCategory(BCLog::CLAIMS)) {
LogPrintf("--- [%lu]: OP_SUPPORT_CLAIM \"%s\" with claimId %s and tx prevout %s at index %d\n", nHeight, name,
claimId.GetHex(), point.hash.ToString(), point.n);
LogPrintf(
"%s: (txid: %s, nOut: %d) Removing support for %s, claimId: %s, from the claim trie due to block disconnect\n",
__func__, point.hash.ToString(), point.n, name, claimId.ToString());
}
LogPrintf("--- [%lu]: OP_SUPPORT_CLAIM \"%s\" with claimId %s and tx prevout %s at index %d\n", nHeight, name, claimId.GetHex(), point.hash.ToString(), point.n);
LogPrintf("%s: (txid: %s, nOut: %d) Removing support for %s, claimId: %s, from the claim trie due to block disconnect\n", __func__, point.hash.ToString(), point.n, name, claimId.ToString());
bool res = trieCache.undoAddSupport(name, point, nHeight);
if (!res)
LogPrint(BCLog::CLAIMS, "%s: Removing support fails\n", __func__);
LogPrintf("%s: Removing support fails\n", __func__);
return res;
}
@ -80,36 +74,32 @@ CClaimScriptSpendOp::CClaimScriptSpendOp(const COutPoint& point, int nHeight, in
bool CClaimScriptSpendOp::claimName(CClaimTrieCache& trieCache, const std::string& name)
{
auto claimId = ClaimIdHash(point.hash, point.n);
LogPrint(BCLog::CLAIMS, "+++ [%lu]: OP_CLAIM_NAME \"%s\" with claimId %s and tx prevout %s at index %d\n", nHeight, name, claimId.GetHex(), point.hash.ToString(), point.n);
LogPrintf("+++ [%lu]: OP_CLAIM_NAME \"%s\" with claimId %s and tx prevout %s at index %d\n", nHeight, name, claimId.GetHex(), point.hash.ToString(), point.n);
return spendClaim(trieCache, name, claimId);
}
bool CClaimScriptSpendOp::updateClaim(CClaimTrieCache& trieCache, const std::string& name, const uint160& claimId)
{
LogPrint(BCLog::CLAIMS, "+++ [%lu]: OP_UPDATE_CLAIM \"%s\" with claimId %s and tx prevout %s at index %d\n", nHeight, name, claimId.GetHex(), point.hash.ToString(), point.n);
LogPrintf("+++ [%lu]: OP_UPDATE_CLAIM \"%s\" with claimId %s and tx prevout %s at index %d\n", nHeight, name, claimId.GetHex(), point.hash.ToString(), point.n);
return spendClaim(trieCache, name, claimId);
}
bool CClaimScriptSpendOp::spendClaim(CClaimTrieCache& trieCache, const std::string& name, const uint160& claimId)
{
LogPrint(BCLog::CLAIMS, "%s: (txid: %s, nOut: %d) Removing %s, claimId: %s, from the claim trie\n", __func__, point.hash.ToString(), point.n, name, claimId.ToString());
LogPrintf("%s: (txid: %s, nOut: %d) Removing %s, claimId: %s, from the claim trie\n", __func__, point.hash.ToString(), point.n, name, claimId.ToString());
bool res = trieCache.spendClaim(name, point, nHeight, nValidHeight);
if (!res)
LogPrint(BCLog::CLAIMS, "%s: Removing fails\n", __func__);
LogPrintf("%s: Removing fails\n", __func__);
return res;
}
bool CClaimScriptSpendOp::supportClaim(CClaimTrieCache& trieCache, const std::string& name, const uint160& claimId)
{
if (LogAcceptCategory(BCLog::CLAIMS)) {
LogPrintf("+++ [%lu]: OP_SUPPORT_CLAIM \"%s\" with claimId %s and tx prevout %s at index %d\n", nHeight, name,
claimId.GetHex(), point.hash.ToString(), point.n);
LogPrintf("%s: (txid: %s, nOut: %d) Restoring support for %s, claimId: %s, to the claim trie\n", __func__,
point.hash.ToString(), point.n, name, claimId.ToString());
}
LogPrintf("+++ [%lu]: OP_SUPPORT_CLAIM \"%s\" with claimId %s and tx prevout %s at index %d\n", nHeight, name, claimId.GetHex(), point.hash.ToString(), point.n);
LogPrintf("%s: (txid: %s, nOut: %d) Restoring support for %s, claimId: %s, to the claim trie\n", __func__, point.hash.ToString(), point.n, name, claimId.ToString());
bool res = trieCache.spendSupport(name, point, nHeight, nValidHeight);
if (!res)
LogPrint(BCLog::CLAIMS, "%s: Removing support fails\n", __func__);
LogPrintf("%s: Removing support fails\n", __func__);
return res;
}
@ -130,13 +120,13 @@ bool CClaimScriptUndoSpendOp::updateClaim(CClaimTrieCache& trieCache, const std:
bool CClaimScriptUndoSpendOp::undoSpendClaim(CClaimTrieCache& trieCache, const std::string& name, const uint160& claimId)
{
LogPrint(BCLog::CLAIMS, "%s: (txid: %s, nOut: %d) Restoring %s, claimId: %s, to the claim trie due to block disconnect\n", __func__, point.hash.ToString(), point.n, name, claimId.ToString());
LogPrintf("%s: (txid: %s, nOut: %d) Restoring %s, claimId: %s, to the claim trie due to block disconnect\n", __func__, point.hash.ToString(), point.n, name, claimId.ToString());
return trieCache.undoSpendClaim(name, point, claimId, nValue, nHeight, nValidHeight);
}
bool CClaimScriptUndoSpendOp::supportClaim(CClaimTrieCache& trieCache, const std::string& name, const uint160& claimId)
{
LogPrint(BCLog::CLAIMS, "%s: (txid: %s, nOut: %d) Restoring support for %s, claimId: %s, to the claim trie due to block disconnect\n", __func__, point.hash.ToString(), point.n, name, claimId.ToString());
LogPrintf("%s: (txid: %s, nOut: %d) Restoring support for %s, claimId: %s, to the claim trie due to block disconnect\n", __func__, point.hash.ToString(), point.n, name, claimId.ToString());
return trieCache.undoSpendSupport(name, point, claimId, nValue, nHeight, nValidHeight);
}
@ -149,7 +139,7 @@ bool ProcessClaim(CClaimScriptOp& claimOp, CClaimTrieCache& trieCache, const CSc
{
int op;
std::vector<std::vector<unsigned char> > vvchParams;
if (!DecodeClaimScript(scriptPubKey, op, vvchParams, trieCache.allowSupportMetadata()))
if (!DecodeClaimScript(scriptPubKey, op, vvchParams))
return false;
switch (op) {
@ -163,81 +153,59 @@ bool ProcessClaim(CClaimScriptOp& claimOp, CClaimTrieCache& trieCache, const CSc
throw std::runtime_error("Unimplemented OP handler.");
}
void UpdateCache(const CTransaction& tx, CClaimTrieCache& trieCache, const CCoinsViewCache& view, int nHeight, const CUpdateCacheCallbacks& callbacks)
bool SpendClaim(CClaimTrieCache& trieCache, const CScript& scriptPubKey, const COutPoint& point, int nHeight, int& nValidHeight, spentClaimsType& spentClaims)
{
class CSpendClaimHistory : public CClaimScriptSpendOp
{
public:
using CClaimScriptSpendOp::CClaimScriptSpendOp;
CSpendClaimHistory(spentClaimsType& spentClaims, const COutPoint& point, int nHeight, int& nValidHeight)
: CClaimScriptSpendOp(point, nHeight, nValidHeight), spentClaims(spentClaims)
{
}
bool spendClaim(CClaimTrieCache& trieCache, const std::string& name, const uint160& claimId) override
{
if (CClaimScriptSpendOp::spendClaim(trieCache, name, claimId)) {
callback(name, claimId);
spentClaims.emplace_back(name, claimId);
return true;
}
return false;
}
std::function<void(const std::string& name, const uint160& claimId)> callback;
private:
spentClaimsType& spentClaims;
};
spentClaimsType spentClaims;
for (std::size_t j = 0; j < tx.vin.size(); j++) {
const CTxIn& txin = tx.vin[j];
const Coin& coin = view.AccessCoin(txin.prevout);
CScript scriptPubKey;
int scriptHeight = nHeight;
if (coin.out.IsNull() && callbacks.findScriptKey) {
scriptPubKey = callbacks.findScriptKey(txin.prevout);
} else {
scriptHeight = coin.nHeight;
scriptPubKey = coin.out.scriptPubKey;
}
if (scriptPubKey.empty())
continue;
int nValidAtHeight;
CSpendClaimHistory spendClaim(COutPoint(txin.prevout.hash, txin.prevout.n), scriptHeight, nValidAtHeight);
spendClaim.callback = [&spentClaims](const std::string& name, const uint160& claimId) {
spentClaims.emplace_back(name, claimId);
};
if (ProcessClaim(spendClaim, trieCache, scriptPubKey) && callbacks.claimUndoHeights)
callbacks.claimUndoHeights(j, nValidAtHeight);
}
CSpendClaimHistory spendClaim(spentClaims, point, nHeight, nValidHeight);
return ProcessClaim(spendClaim, trieCache, scriptPubKey);
}
bool AddSpendClaim(CClaimTrieCache& trieCache, const CScript& scriptPubKey, const COutPoint& point, CAmount nValue, int nHeight, spentClaimsType& spentClaims)
{
class CAddSpendClaim : public CClaimScriptAddOp
{
public:
using CClaimScriptAddOp::CClaimScriptAddOp;
CAddSpendClaim(spentClaimsType& spentClaims, const COutPoint& point, CAmount nValue, int nHeight)
: CClaimScriptAddOp(point, nValue, nHeight), spentClaims(spentClaims)
{
}
bool updateClaim(CClaimTrieCache& trieCache, const std::string& name, const uint160& claimId) override
{
if (callback(name, claimId))
return CClaimScriptAddOp::updateClaim(trieCache, name, claimId);
return false;
}
std::function<bool(const std::string& name, const uint160& claimId)> callback;
};
for (std::size_t j = 0; j < tx.vout.size(); j++) {
const CTxOut& txout = tx.vout[j];
if (txout.scriptPubKey.empty())
continue;
CAddSpendClaim addClaim(COutPoint(tx.GetHash(), j), txout.nValue, nHeight);
addClaim.callback = [&trieCache, &spentClaims](const std::string& name, const uint160& claimId) -> bool {
for (auto itSpent = spentClaims.begin(); itSpent != spentClaims.end(); ++itSpent) {
spentClaimsType::iterator itSpent = spentClaims.begin();
for (; itSpent != spentClaims.end(); ++itSpent) {
if (itSpent->second == claimId && trieCache.normalizeClaimName(name) == trieCache.normalizeClaimName(itSpent->first)) {
spentClaims.erase(itSpent);
return true;
return CClaimScriptAddOp::updateClaim(trieCache, name, claimId);
}
}
return false;
};
ProcessClaim(addClaim, trieCache, txout.scriptPubKey);
}
}
}
private:
spentClaimsType& spentClaims;
};
CAddSpendClaim addClaim(spentClaims, point, nValue, nHeight);
return ProcessClaim(addClaim, trieCache, scriptPubKey);
}


@ -59,17 +59,17 @@ public:
*/
CClaimScriptAddOp(const COutPoint& point, CAmount nValue, int nHeight);
/**
* Implementation of OP_CLAIM_NAME handler
* Implamention of OP_CLAIM_NAME handler
* @see CClaimScriptOp::claimName
*/
bool claimName(CClaimTrieCache& trieCache, const std::string& name) override;
/**
* Implementation of OP_UPDATE_CLAIM handler
* Implamention of OP_UPDATE_CLAIM handler
* @see CClaimScriptOp::updateClaim
*/
bool updateClaim(CClaimTrieCache& trieCache, const std::string& name, const uint160& claimId) override;
/**
* Implementation of OP_SUPPORT_CLAIM handler
* Implamention of OP_SUPPORT_CLAIM handler
* @see CClaimScriptOp::supportClaim
*/
bool supportClaim(CClaimTrieCache& trieCache, const std::string& name, const uint160& claimId) override;
@ -100,17 +100,17 @@ public:
*/
CClaimScriptUndoAddOp(const COutPoint& point, int nHeight);
/**
* Implementation of OP_CLAIM_NAME handler
* Implamention of OP_CLAIM_NAME handler
* @see CClaimScriptOp::claimName
*/
bool claimName(CClaimTrieCache& trieCache, const std::string& name) override;
/**
* Implementation of OP_UPDATE_CLAIM handler
* Implamention of OP_UPDATE_CLAIM handler
* @see CClaimScriptOp::updateClaim
*/
bool updateClaim(CClaimTrieCache& trieCache, const std::string& name, const uint160& claimId) override;
/**
* Implementation of OP_SUPPORT_CLAIM handler
* Implamention of OP_SUPPORT_CLAIM handler
* @see CClaimScriptOp::supportClaim
*/
bool supportClaim(CClaimTrieCache& trieCache, const std::string& name, const uint160& claimId) override;
@ -141,17 +141,17 @@ public:
*/
CClaimScriptSpendOp(const COutPoint& point, int nHeight, int& nValidHeight);
/**
* Implementation of OP_CLAIM_NAME handler
* Implamention of OP_CLAIM_NAME handler
* @see CClaimScriptOp::claimName
*/
bool claimName(CClaimTrieCache& trieCache, const std::string& name) override;
/**
* Implementation of OP_UPDATE_CLAIM handler
* Implamention of OP_UPDATE_CLAIM handler
* @see CClaimScriptOp::updateClaim
*/
bool updateClaim(CClaimTrieCache& trieCache, const std::string& name, const uint160& claimId) override;
/**
* Implementation of OP_SUPPORT_CLAIM handler
* Implamention of OP_SUPPORT_CLAIM handler
* @see CClaimScriptOp::supportClaim
*/
bool supportClaim(CClaimTrieCache& trieCache, const std::string& name, const uint160& claimId) override;
@ -184,17 +184,17 @@ public:
*/
CClaimScriptUndoSpendOp(const COutPoint& point, CAmount nValue, int nHeight, int nValidHeight);
/**
* Implementation of OP_CLAIM_NAME handler
* Implamention of OP_CLAIM_NAME handler
* @see CClaimScriptOp::claimName
*/
bool claimName(CClaimTrieCache& trieCache, const std::string& name) override;
/**
* Implementation of OP_UPDATE_CLAIM handler
* Implamention of OP_UPDATE_CLAIM handler
* @see CClaimScriptOp::updateClaim
*/
bool updateClaim(CClaimTrieCache& trieCache, const std::string& name, const uint160& claimId) override;
/**
* Implementation of OP_SUPPORT_CLAIM handler
* Implamention of OP_SUPPORT_CLAIM handler
* @see CClaimScriptOp::supportClaim
*/
bool supportClaim(CClaimTrieCache& trieCache, const std::string& name, const uint160& claimId) override;
@ -225,21 +225,26 @@ typedef std::pair<std::string, uint160> spentClaimType;
typedef std::vector<spentClaimType> spentClaimsType;
struct CUpdateCacheCallbacks
{
std::function<CScript(const COutPoint& point)> findScriptKey;
std::function<void(int, int)> claimUndoHeights;
};
/**
* Function to spend a claim from the trie, keeping a list of the successfully spent claims
* @param[in] tx transaction inputs/outputs
* @param[in] trieCache trie to operate on
* @param[in] view coins cache
* @param[in] scriptPubKey claim script to be decoded
* @param[in] point pair of transaction hash and its index
* @param[in] nHeight entry height of the claim
* @param[in] callbacks optional callbacks to resolve spent scripts and record claim undo heights
* @param[out] nValidHeight valid height of the claim
* @param[out] spentClaims list to which each successfully spent claim is appended
*/
void UpdateCache(const CTransaction& tx, CClaimTrieCache& trieCache, const CCoinsViewCache& view, int nHeight, const CUpdateCacheCallbacks& callbacks = {});
bool SpendClaim(CClaimTrieCache& trieCache, const CScript& scriptPubKey, const COutPoint& point, int nHeight, int& nValidHeight, spentClaimsType& spentClaims);
/**
* Function to add / update (that present in spent list) claim in trie
* @param[in] trieCache trie to operate on
* @param[in] scriptPubKey claim script to be decoded
* @param[in] point pair of transaction hash and its index
* @param[in] nValue value of the claim
* @param[in] nHeight entry height of the claim
* @param[out] spentClaims list from which a claim is erased once it is successfully re-added as an update
*/
bool AddSpendClaim(CClaimTrieCache& trieCache, const CScript& scriptPubKey, const COutPoint& point, CAmount nValue, int nHeight, spentClaimsType& spentClaims);
#endif // CLAIMSCRIPTOP_H
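The two helpers declared above are the per-script counterparts of the UpdateCache interface shown on the other side of this diff: the caller walks a transaction's inputs and outputs itself and invokes them one script at a time. A minimal usage sketch under that assumption; the wrapper name ProcessClaimsInTx and the variable names are illustrative only, not the actual call sites in validation.cpp:

    #include <claimscriptop.h>
    #include <coins.h>
    #include <primitives/transaction.h>

    // Illustrative sketch of driving SpendClaim/AddSpendClaim while connecting a block.
    static void ProcessClaimsInTx(const CTransaction& tx, CClaimTrieCache& trieCache,
                                  const CCoinsViewCache& view, int nHeight)
    {
        spentClaimsType spentClaims;
        // spend whatever claims the inputs reference, remembering what was removed
        for (const CTxIn& txin : tx.vin) {
            const Coin& coin = view.AccessCoin(txin.prevout);
            int nValidHeight = 0;
            SpendClaim(trieCache, coin.out.scriptPubKey, txin.prevout, coin.nHeight, nValidHeight, spentClaims);
        }
        // add the claims created by the outputs; an update whose claimId matches a
        // spent claim is treated as a renewal and erased from the spent list
        for (std::size_t n = 0; n < tx.vout.size(); ++n) {
            AddSpendClaim(trieCache, tx.vout[n].scriptPubKey, COutPoint(tx.GetHash(), (uint32_t)n),
                          tx.vout[n].nValue, nHeight, spentClaims);
        }
    }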

File diff suppressed because it is too large

File diff suppressed because it is too large


@ -1,67 +1,41 @@
#include "claimtrie.h"
#include <consensus/merkle.h>
#include <chainparams.h>
#include <claimtrie.h>
#include <hash.h>
#include <boost/locale.hpp>
#include <boost/algorithm/string.hpp>
#include <boost/foreach.hpp>
#include <boost/locale/conversion.hpp>
#include <boost/locale/localization_backend.hpp>
#include <boost/locale.hpp>
#include <boost/scope_exit.hpp>
#include <boost/scoped_ptr.hpp>
CClaimTrieCacheExpirationFork::CClaimTrieCacheExpirationFork(CClaimTrie* base)
: CClaimTrieCacheBase(base)
void CClaimTrieCacheExpirationFork::removeAndAddToExpirationQueue(expirationQueueRowType &row, int height, bool increment) const
{
setExpirationTime(Params().GetConsensus().GetExpirationTime(nNextHeight));
}
void CClaimTrieCacheExpirationFork::setExpirationTime(int time)
{
nExpirationTime = time;
}
int CClaimTrieCacheExpirationFork::expirationTime() const
{
return nExpirationTime;
}
bool CClaimTrieCacheExpirationFork::incrementBlock(insertUndoType& insertUndo, claimQueueRowType& expireUndo, insertUndoType& insertSupportUndo, supportQueueRowType& expireSupportUndo, std::vector<std::pair<std::string, int>>& takeoverHeightUndo)
{
if (CClaimTrieCacheBase::incrementBlock(insertUndo, expireUndo, insertSupportUndo, expireSupportUndo, takeoverHeightUndo)) {
setExpirationTime(Params().GetConsensus().GetExpirationTime(nNextHeight));
return true;
for (expirationQueueRowType::iterator e = row.begin(); e != row.end(); ++e)
{
// remove and insert with new expiration time
removeFromExpirationQueue(e->name, e->outPoint, height);
int extend_expiration = Params().GetConsensus().nExtendedClaimExpirationTime - Params().GetConsensus().nOriginalClaimExpirationTime;
int new_expiration_height = increment ? height + extend_expiration : height - extend_expiration;
nameOutPointType entry(e->name, e->outPoint);
addToExpirationQueue(new_expiration_height, entry);
}
return false;
}
bool CClaimTrieCacheExpirationFork::decrementBlock(insertUndoType& insertUndo, claimQueueRowType& expireUndo, insertUndoType& insertSupportUndo, supportQueueRowType& expireSupportUndo)
void CClaimTrieCacheExpirationFork::removeAndAddSupportToExpirationQueue(expirationQueueRowType &row, int height, bool increment) const
{
if (CClaimTrieCacheBase::decrementBlock(insertUndo, expireUndo, insertSupportUndo, expireSupportUndo)) {
setExpirationTime(Params().GetConsensus().GetExpirationTime(nNextHeight));
return true;
for (expirationQueueRowType::iterator e = row.begin(); e != row.end(); ++e)
{
// remove and insert with new expiration time
removeSupportFromExpirationQueue(e->name, e->outPoint, height);
int extend_expiration = Params().GetConsensus().nExtendedClaimExpirationTime - Params().GetConsensus().nOriginalClaimExpirationTime;
int new_expiration_height = increment ? height + extend_expiration : height - extend_expiration;
nameOutPointType entry(e->name, e->outPoint);
addSupportToExpirationQueue(new_expiration_height, entry);
}
return false;
}
void CClaimTrieCacheExpirationFork::initializeIncrement()
{
// we could do this in the constructor, but that would not allow for multiple increments in a row (as done in unit tests)
if (nNextHeight != Params().GetConsensus().nExtendedClaimExpirationForkHeight)
return;
forkForExpirationChange(true);
}
bool CClaimTrieCacheExpirationFork::finalizeDecrement(std::vector<std::pair<std::string, int>>& takeoverHeightUndo)
{
auto ret = CClaimTrieCacheBase::finalizeDecrement(takeoverHeightUndo);
if (ret && nNextHeight == Params().GetConsensus().nExtendedClaimExpirationForkHeight)
forkForExpirationChange(false);
return ret;
}
bool CClaimTrieCacheExpirationFork::forkForExpirationChange(bool increment)
bool CClaimTrieCacheExpirationFork::forkForExpirationChange(bool increment) const
{
/*
If increment is True, we have forked to extend the expiration time, thus items in the expiration queue
@ -71,40 +45,75 @@ bool CClaimTrieCacheExpirationFork::forkForExpirationChange(bool increment)
will have their expiration extension removed.
*/
// look through dirty expiration queues
std::set<int> dirtyHeights;
for (expirationQueueType::const_iterator i = base->dirtyExpirationQueueRows.begin(); i != base->dirtyExpirationQueueRows.end(); ++i)
{
int height = i->first;
dirtyHeights.insert(height);
expirationQueueRowType row = i->second;
removeAndAddToExpirationQueue(row, height, increment);
}
std::set<int> dirtySupportHeights;
for (expirationQueueType::const_iterator i = base->dirtySupportExpirationQueueRows.begin(); i != base->dirtySupportExpirationQueueRows.end(); ++i)
{
int height = i->first;
dirtySupportHeights.insert(height);
expirationQueueRowType row = i->second;
removeAndAddSupportToExpirationQueue(row, height, increment);
}
// look through db for expiration queues, if we haven't already found them in the dirty expiration queues
boost::scoped_ptr<CDBIterator> pcursor(base->db->NewIterator());
for (pcursor->SeekToFirst(); pcursor->Valid(); pcursor->Next()) {
std::pair<uint8_t, int> key;
if (!pcursor->GetKey(key))
continue;
int height = key.second;
if (key.first == CLAIM_EXP_QUEUE_ROW) {
expirationQueueRowType row;
if (pcursor->GetValue(row)) {
reactivateClaim(row, height, increment);
} else {
return error("%s(): error reading expiration queue rows from disk", __func__);
boost::scoped_ptr<CDBIterator> pcursor(const_cast<CDBWrapper*>(&base->db)->NewIterator());
pcursor->SeekToFirst();
while (pcursor->Valid())
{
std::pair<char, int> key;
if (pcursor->GetKey(key))
{
int height = key.second;
// if we've looked through this in dirtyExpirationQueueRows, don't use it
// because it's stale
if ((key.first == EXP_QUEUE_ROW) & (dirtyHeights.count(height) == 0))
{
expirationQueueRowType row;
if (pcursor->GetValue(row))
{
removeAndAddToExpirationQueue(row, height, increment);
}
else
{
return error("%s(): error reading expiration queue rows from disk", __func__);
}
}
} else if (key.first == SUPPORT_EXP_QUEUE_ROW) {
expirationQueueRowType row;
if (pcursor->GetValue(row)) {
reactivateSupport(row, height, increment);
} else {
return error("%s(): error reading support expiration queue rows from disk", __func__);
else if ((key.first == SUPPORT_EXP_QUEUE_ROW) & (dirtySupportHeights.count(height) == 0))
{
expirationQueueRowType row;
if (pcursor->GetValue(row))
{
removeAndAddSupportToExpirationQueue(row, height, increment);
}
else
{
return error("%s(): error reading support expiration queue rows from disk", __func__);
}
}
}
pcursor->Next();
}
return true;
}
bool CClaimTrieCacheNormalizationFork::shouldNormalize() const
{
return nNextHeight > Params().GetConsensus().nNormalizedNameForkHeight;
bool CClaimTrieCacheNormalizationFork::shouldNormalize() const {
return nCurrentHeight > Params().GetConsensus().nNormalizedNameForkHeight;
}
std::string CClaimTrieCacheNormalizationFork::normalizeClaimName(const std::string& name, bool force) const
{
std::string CClaimTrieCacheNormalizationFork::normalizeClaimName(const std::string& name, bool force) const {
if (!force && !shouldNormalize())
return name;
@ -112,7 +121,7 @@ std::string CClaimTrieCacheNormalizationFork::normalizeClaimName(const std::stri
static bool initialized = false;
if (!initialized) {
static boost::locale::localization_backend_manager manager =
boost::locale::localization_backend_manager::global();
boost::locale::localization_backend_manager::global();
manager.select("icu");
static boost::locale::generator curLocale(manager);
@ -122,6 +131,7 @@ std::string CClaimTrieCacheNormalizationFork::normalizeClaimName(const std::stri
std::string normalized;
try {
// Check if it is a valid utf-8 string. If not, it will throw a
// boost::locale::conv::conversion_error exception which we catch later
normalized = boost::locale::conv::to_utf<char>(name, "UTF-8", boost::locale::conv::stop);
@ -131,12 +141,15 @@ std::string CClaimTrieCacheNormalizationFork::normalizeClaimName(const std::stri
// these methods supposedly only use the "UTF8" portion of the locale object:
normalized = boost::locale::normalize(normalized, boost::locale::norm_nfd, utf8);
normalized = boost::locale::fold_case(normalized, utf8);
} catch (const boost::locale::conv::conversion_error& e) {
}
catch (const boost::locale::conv::conversion_error& e){
return name;
} catch (const std::bad_cast& e) {
}
catch (const std::bad_cast& e) {
LogPrintf("%s() is invalid or dependencies are missing: %s\n", __func__, e.what());
throw;
} catch (const std::exception& e) { // TODO: change to use ... with current_exception() in c++11
}
catch (const std::exception& e) { // TODO: change to use ... with current_exception() in c++11
LogPrintf("%s() had an unexpected exception: %s\n", __func__, e.what());
return name;
}
@ -144,338 +157,144 @@ std::string CClaimTrieCacheNormalizationFork::normalizeClaimName(const std::stri
return normalized;
}
bool CClaimTrieCacheNormalizationFork::insertClaimIntoTrie(const std::string& name, const CClaimValue& claim, bool fCheckTakeover)
{
bool CClaimTrieCacheNormalizationFork::insertClaimIntoTrie(const std::string& name, CClaimValue claim,
bool fCheckTakeover) const {
return CClaimTrieCacheExpirationFork::insertClaimIntoTrie(normalizeClaimName(name, overrideInsertNormalization), claim, fCheckTakeover);
}
bool CClaimTrieCacheNormalizationFork::removeClaimFromTrie(const std::string& name, const COutPoint& outPoint, CClaimValue& claim, bool fCheckTakeover)
{
bool CClaimTrieCacheNormalizationFork::removeClaimFromTrie(const std::string& name, const COutPoint& outPoint,
CClaimValue& claim, bool fCheckTakeover) const {
return CClaimTrieCacheExpirationFork::removeClaimFromTrie(normalizeClaimName(name, overrideRemoveNormalization), outPoint, claim, fCheckTakeover);
}
bool CClaimTrieCacheNormalizationFork::insertSupportIntoMap(const std::string& name, const CSupportValue& support, bool fCheckTakeover)
{
bool CClaimTrieCacheNormalizationFork::insertSupportIntoMap(const std::string& name, CSupportValue support,
bool fCheckTakeover) const {
return CClaimTrieCacheExpirationFork::insertSupportIntoMap(normalizeClaimName(name, overrideInsertNormalization), support, fCheckTakeover);
}
bool CClaimTrieCacheNormalizationFork::removeSupportFromMap(const std::string& name, const COutPoint& outPoint, CSupportValue& support, bool fCheckTakeover)
{
bool CClaimTrieCacheNormalizationFork::removeSupportFromMap(const std::string& name, const COutPoint& outPoint,
CSupportValue& support, bool fCheckTakeover) const {
return CClaimTrieCacheExpirationFork::removeSupportFromMap(normalizeClaimName(name, overrideRemoveNormalization), outPoint, support, fCheckTakeover);
}
bool CClaimTrieCacheNormalizationFork::normalizeAllNamesInTrieIfNecessary(insertUndoType& insertUndo, claimQueueRowType& removeUndo, insertUndoType& insertSupportUndo, supportQueueRowType& expireSupportUndo, std::vector<std::pair<std::string, int>>& takeoverHeightUndo)
{
if (nNextHeight != Params().GetConsensus().nNormalizedNameForkHeight)
return false;
struct claimsForNormalization: public claimsForNameType {
std::string normalized;
claimsForNormalization(const std::vector<CClaimValue>& claims, const std::vector<CSupportValue>& supports,
int nLastTakeoverHeight, const std::string& name, const std::string& normalized)
: claimsForNameType(claims, supports, nLastTakeoverHeight, name), normalized(normalized) {}
};
// run the one-time upgrade of all names that need to change
// it modifies the (cache) trie as it goes, so we need to grab everything to be modified first
bool CClaimTrieCacheNormalizationFork::normalizeAllNamesInTrieIfNecessary(insertUndoType& insertUndo, claimQueueRowType& removeUndo,
insertUndoType& insertSupportUndo, supportQueueRowType& expireSupportUndo,
std::vector<std::pair<std::string, int> >& takeoverHeightUndo) const {
for (auto it = base->cbegin(); it != base->cend(); ++it) {
const std::string normalized = normalizeClaimName(it.key(), true);
if (normalized == it.key())
continue;
struct CNameChangeDetector: public CNodeCallback {
std::vector<claimsForNormalization> hits;
const CClaimTrieCacheNormalizationFork* owner;
CNameChangeDetector(const CClaimTrieCacheNormalizationFork* owner): owner(owner) {}
void visit(const std::string& name, const CClaimTrieNode* node) {
if (node->claims.empty()) return;
const std::string normalized = owner->normalizeClaimName(name, true);
if (normalized == name) return;
auto& name = it.key();
auto supports = getSupportsForName(name);
for (auto support : supports) {
// if it's already going to expire just skip it
if (support.nHeight + expirationTime() <= nNextHeight)
continue;
assert(removeSupportFromMap(name, support.outPoint, support, false));
expireSupportUndo.emplace_back(name, support);
assert(insertSupportIntoMap(normalized, support, false));
insertSupportUndo.emplace_back(name, support.outPoint, -1);
supportMapEntryType supports;
owner->getSupportsForName(name, supports);
const claimsForNormalization cfn(node->claims, supports, node->nHeightOfLastTakeover, name, normalized);
hits.push_back(cfn);
}
};
namesToCheckForTakeover.insert(normalized);
if (nCurrentHeight == Params().GetConsensus().nNormalizedNameForkHeight) {
auto cached = cacheData(name, false);
if (!cached || cached->empty())
continue;
// run the one-time upgrade of all names that need to change
// it modifies the (cache) trie as it goes, so we need to grab
// everything to be modified first
CNameChangeDetector detector(this);
iterateTrie(detector);
auto claimsCopy = cached->claims;
auto takeoverHeightCopy = cached->nHeightOfLastTakeover;
for (auto claim : claimsCopy) {
if (claim.nHeight + expirationTime() <= nNextHeight)
continue;
for (std::vector<claimsForNormalization>::iterator it = detector.hits.begin(); it != detector.hits.end(); ++it) {
BOOST_FOREACH(CSupportValue support, it->supports) {
// if it's already going to expire just skip it
if (support.nHeight + base->nExpirationTime <= nCurrentHeight)
continue;
assert(removeClaimFromTrie(name, claim.outPoint, claim, false));
removeUndo.emplace_back(name, claim);
assert(insertClaimIntoTrie(normalized, claim, true));
insertUndo.emplace_back(name, claim.outPoint, -1);
bool success = removeSupportFromMap(it->name, support.outPoint, support, false);
assert(success);
expireSupportUndo.push_back(std::make_pair(it->name, support));
success = insertSupportIntoMap(it->normalized, support, false);
assert(success);
insertSupportUndo.push_back(nameOutPointHeightType(it->name, support.outPoint, -1));
}
BOOST_FOREACH(CClaimValue claim, it->claims) {
if (claim.nHeight + base->nExpirationTime <= nCurrentHeight)
continue;
bool success = removeClaimFromTrie(it->name, claim.outPoint, claim, false);
assert(success);
removeUndo.push_back(std::make_pair(it->name, claim));
success = insertClaimIntoTrie(it->normalized, claim, true);
assert(success);
insertUndo.push_back(nameOutPointHeightType(it->name, claim.outPoint, -1));
}
takeoverHeightUndo.push_back(std::make_pair(it->name, it->nLastTakeoverHeight));
}
takeoverHeightUndo.emplace_back(name, takeoverHeightCopy);
return true;
}
return true;
return false;
}
bool CClaimTrieCacheNormalizationFork::incrementBlock(insertUndoType& insertUndo, claimQueueRowType& expireUndo, insertUndoType& insertSupportUndo, supportQueueRowType& expireSupportUndo, std::vector<std::pair<std::string, int>>& takeoverHeightUndo)
{
overrideInsertNormalization = normalizeAllNamesInTrieIfNecessary(insertUndo, expireUndo, insertSupportUndo, expireSupportUndo, takeoverHeightUndo);
BOOST_SCOPE_EXIT(&overrideInsertNormalization) { overrideInsertNormalization = false; }
BOOST_SCOPE_EXIT_END
return CClaimTrieCacheExpirationFork::incrementBlock(insertUndo, expireUndo, insertSupportUndo, expireSupportUndo, takeoverHeightUndo);
bool CClaimTrieCacheNormalizationFork::incrementBlock(insertUndoType& insertUndo,
claimQueueRowType& expireUndo,
insertUndoType& insertSupportUndo,
supportQueueRowType& expireSupportUndo,
std::vector<std::pair<std::string, int> >& takeoverHeightUndo) {
overrideInsertNormalization = normalizeAllNamesInTrieIfNecessary(insertUndo, expireUndo, insertSupportUndo,
expireSupportUndo, takeoverHeightUndo);
BOOST_SCOPE_EXIT(&overrideInsertNormalization) { overrideInsertNormalization = false; } BOOST_SCOPE_EXIT_END
return CClaimTrieCacheExpirationFork::incrementBlock(insertUndo, expireUndo, insertSupportUndo,
expireSupportUndo, takeoverHeightUndo);
}
bool CClaimTrieCacheNormalizationFork::decrementBlock(insertUndoType& insertUndo, claimQueueRowType& expireUndo, insertUndoType& insertSupportUndo, supportQueueRowType& expireSupportUndo)
{
bool CClaimTrieCacheNormalizationFork::decrementBlock(insertUndoType& insertUndo,
claimQueueRowType& expireUndo,
insertUndoType& insertSupportUndo,
supportQueueRowType& expireSupportUndo,
std::vector<std::pair<std::string, int> >& takeoverHeightUndo) {
overrideRemoveNormalization = shouldNormalize();
BOOST_SCOPE_EXIT(&overrideRemoveNormalization) { overrideRemoveNormalization = false; }
BOOST_SCOPE_EXIT_END
return CClaimTrieCacheExpirationFork::decrementBlock(insertUndo, expireUndo, insertSupportUndo, expireSupportUndo);
BOOST_SCOPE_EXIT(&overrideRemoveNormalization) { overrideRemoveNormalization = false; } BOOST_SCOPE_EXIT_END
return CClaimTrieCacheExpirationFork::decrementBlock(insertUndo, expireUndo, insertSupportUndo,
expireSupportUndo, takeoverHeightUndo);
}
bool CClaimTrieCacheNormalizationFork::getProofForName(const std::string& name, CClaimTrieProof& proof)
{
bool CClaimTrieCacheNormalizationFork::getProofForName(const std::string& name, CClaimTrieProof& proof) const {
return CClaimTrieCacheExpirationFork::getProofForName(normalizeClaimName(name), proof);
}
bool CClaimTrieCacheNormalizationFork::getInfoForName(const std::string& name, CClaimValue& claim) const
{
bool CClaimTrieCacheNormalizationFork::getInfoForName(const std::string& name, CClaimValue& claim) const {
return CClaimTrieCacheExpirationFork::getInfoForName(normalizeClaimName(name), claim);
}
CClaimSupportToName CClaimTrieCacheNormalizationFork::getClaimsForName(const std::string& name) const
{
claimsForNameType CClaimTrieCacheNormalizationFork::getClaimsForName(const std::string& name) const {
return CClaimTrieCacheExpirationFork::getClaimsForName(normalizeClaimName(name));
}
int CClaimTrieCacheNormalizationFork::getDelayForName(const std::string& name, const uint160& claimId) const
{
int CClaimTrieCacheNormalizationFork::getDelayForName(const std::string& name, const uint160& claimId) const {
return CClaimTrieCacheExpirationFork::getDelayForName(normalizeClaimName(name), claimId);
}
std::string CClaimTrieCacheNormalizationFork::adjustNameForValidHeight(const std::string& name, int validHeight) const
{
void CClaimTrieCacheNormalizationFork::addClaimToQueues(const std::string& name, CClaimValue& claim) const {
return CClaimTrieCacheExpirationFork::addClaimToQueues(normalizeClaimName(name,
claim.nValidAtHeight > Params().GetConsensus().nNormalizedNameForkHeight), claim);
}
bool CClaimTrieCacheNormalizationFork::addSupportToQueues(const std::string& name, CSupportValue& support) const {
return CClaimTrieCacheExpirationFork::addSupportToQueues(normalizeClaimName(name,
support.nValidAtHeight > Params().GetConsensus().nNormalizedNameForkHeight), support);
}
std::string CClaimTrieCacheNormalizationFork::adjustNameForValidHeight(const std::string& name, int validHeight) const {
return normalizeClaimName(name, validHeight > Params().GetConsensus().nNormalizedNameForkHeight);
}
CClaimTrieCacheHashFork::CClaimTrieCacheHashFork(CClaimTrie* base) : CClaimTrieCacheNormalizationFork(base)
{
}
static const uint256 leafHash = uint256S("0000000000000000000000000000000000000000000000000000000000000002");
static const uint256 emptyHash = uint256S("0000000000000000000000000000000000000000000000000000000000000003");
std::vector<uint256> getClaimHashes(const CClaimTrieData& data)
{
std::vector<uint256> hashes;
for (auto& claim : data.claims)
hashes.push_back(getValueHash(claim.outPoint, data.nHeightOfLastTakeover));
return hashes;
}
template <typename T>
using iCbType = std::function<uint256(T&)>;
template <typename TIterator>
uint256 recursiveBinaryTreeHash(TIterator& it, const iCbType<TIterator>& process)
{
std::vector<uint256> childHashes;
for (auto& child : it.children())
childHashes.emplace_back(process(child));
std::vector<uint256> claimHashes;
if (!it->empty())
claimHashes = getClaimHashes(it.data());
else if (!it.hasChildren())
return {};
auto left = childHashes.empty() ? leafHash : ComputeMerkleRoot(childHashes);
auto right = claimHashes.empty() ? emptyHash : ComputeMerkleRoot(claimHashes);
return Hash(left.begin(), left.end(), right.begin(), right.end());
}
uint256 CClaimTrieCacheHashFork::recursiveComputeMerkleHash(CClaimTrie::iterator& it)
{
if (nNextHeight < Params().GetConsensus().nAllClaimsInMerkleForkHeight)
return CClaimTrieCacheNormalizationFork::recursiveComputeMerkleHash(it);
using iterator = CClaimTrie::iterator;
iCbType<iterator> process = [&process](iterator& it) -> uint256 {
if (it->hash.IsNull())
it->hash = recursiveBinaryTreeHash(it, process);
assert(!it->hash.IsNull());
return it->hash;
};
return process(it);
}
bool CClaimTrieCacheHashFork::recursiveCheckConsistency(CClaimTrie::const_iterator& it, std::string& failed) const
{
if (nNextHeight < Params().GetConsensus().nAllClaimsInMerkleForkHeight)
return CClaimTrieCacheNormalizationFork::recursiveCheckConsistency(it, failed);
struct CRecursiveBreak {};
using iterator = CClaimTrie::const_iterator;
iCbType<iterator> process = [&failed, &process](iterator& it) -> uint256 {
if (it->hash.IsNull() || it->hash != recursiveBinaryTreeHash(it, process)) {
failed = it.key();
throw CRecursiveBreak();
}
return it->hash;
};
try {
process(it);
} catch (const CRecursiveBreak&) {
return false;
}
return true;
}
std::vector<uint256> ComputeMerklePath(const std::vector<uint256>& hashes, uint32_t idx)
{
uint32_t count = 0;
int matchlevel = -1;
bool matchh = false;
uint256 inner[32], h;
const uint32_t one = 1;
std::vector<uint256> res;
const auto iterateInner = [&](int& level) {
for (; !(count & (one << level)); level++) {
const auto& ihash = inner[level];
if (matchh) {
res.push_back(ihash);
} else if (matchlevel == level) {
res.push_back(h);
matchh = true;
}
h = Hash(ihash.begin(), ihash.end(), h.begin(), h.end());
}
};
while (count < hashes.size()) {
h = hashes[count];
matchh = count == idx;
count++;
int level = 0;
iterateInner(level);
// Store the resulting hash at inner position level.
inner[level] = h;
if (matchh)
matchlevel = level;
}
int level = 0;
while (!(count & (one << level)))
level++;
h = inner[level];
matchh = matchlevel == level;
while (count != (one << level)) {
// If we reach this point, h is an inner value that is not the top.
if (matchh)
res.push_back(h);
h = Hash(h.begin(), h.end(), h.begin(), h.end());
// Increment count to the value it would have if two entries at this level had existed.
count += (one << level);
level++;
iterateInner(level);
}
return res;
}
bool CClaimTrieCacheHashFork::getProofForName(const std::string& name, CClaimTrieProof& proof)
{
return getProofForName(name, proof, nullptr);
}
bool CClaimTrieCacheHashFork::getProofForName(const std::string& name, CClaimTrieProof& proof, const std::function<bool(const CClaimValue&)>& comp)
{
if (nNextHeight < Params().GetConsensus().nAllClaimsInMerkleForkHeight)
return CClaimTrieCacheNormalizationFork::getProofForName(name, proof);
auto fillPairs = [&proof](const std::vector<uint256>& hashes, uint32_t idx) {
auto partials = ComputeMerklePath(hashes, idx);
for (int i = partials.size() - 1; i >= 0; --i)
proof.pairs.emplace_back((idx >> i) & 1, partials[i]);
};
// cache the parent nodes
cacheData(name, false);
getMerkleHash();
proof = CClaimTrieProof();
for (auto& it : static_cast<const CClaimTrie&>(nodesToAddOrUpdate).nodes(name)) {
std::vector<uint256> childHashes;
uint32_t nextCurrentIdx = 0;
for (auto& child : it.children()) {
if (name.find(child.key()) == 0)
nextCurrentIdx = uint32_t(childHashes.size());
childHashes.push_back(child->hash);
}
std::vector<uint256> claimHashes;
if (!it->empty())
claimHashes = getClaimHashes(it.data());
// I am on a node; I need a hash(children, claims)
// if I am the last node on the list, it will be hash(children, x)
// else it will be hash(x, claims)
if (it.key() == name) {
uint32_t nClaimIndex = 0;
auto& claims = it->claims;
auto itClaim = !comp ? claims.begin() : std::find_if(claims.begin(), claims.end(), comp);
if (itClaim != claims.end()) {
proof.hasValue = true;
proof.outPoint = itClaim->outPoint;
proof.nHeightOfLastTakeover = it->nHeightOfLastTakeover;
nClaimIndex = std::distance(claims.begin(), itClaim);
}
auto hash = childHashes.empty() ? leafHash : ComputeMerkleRoot(childHashes);
proof.pairs.emplace_back(true, hash);
if (!claimHashes.empty())
fillPairs(claimHashes, nClaimIndex);
} else {
auto hash = claimHashes.empty() ? emptyHash : ComputeMerkleRoot(claimHashes);
proof.pairs.emplace_back(false, hash);
if (!childHashes.empty())
fillPairs(childHashes, nextCurrentIdx);
}
}
std::reverse(proof.pairs.begin(), proof.pairs.end());
return true;
}
void CClaimTrieCacheHashFork::copyAllBaseToCache()
{
for (auto it = base->cbegin(); it != base->cend(); ++it)
if (nodesAlreadyCached.insert(it.key()).second)
nodesToAddOrUpdate.insert(it.key(), it.data());
for (auto it = nodesToAddOrUpdate.begin(); it != nodesToAddOrUpdate.end(); ++it)
it->hash.SetNull();
}
void CClaimTrieCacheHashFork::initializeIncrement()
{
CClaimTrieCacheNormalizationFork::initializeIncrement();
// we could do this in the constructor, but that would not allow for multiple increments in a row (as done in unit tests)
if (nNextHeight != Params().GetConsensus().nAllClaimsInMerkleForkHeight - 1)
return;
// if we are forking, we load the entire base trie into the cache trie
// we reset its hash computation so it can be recomputed completely
copyAllBaseToCache();
}
bool CClaimTrieCacheHashFork::finalizeDecrement(std::vector<std::pair<std::string, int>>& takeoverHeightUndo)
{
auto ret = CClaimTrieCacheNormalizationFork::finalizeDecrement(takeoverHeightUndo);
if (ret && nNextHeight == Params().GetConsensus().nAllClaimsInMerkleForkHeight - 1)
copyAllBaseToCache();
return ret;
}
bool CClaimTrieCacheHashFork::allowSupportMetadata() const
{
return nNextHeight >= Params().GetConsensus().nAllClaimsInMerkleForkHeight;
}
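For reference, ComputeMerklePath above mirrors Bitcoin's ComputeMerkleBranch: it returns the sibling hashes needed to reconnect one leaf to the root, lowest tree level first. A minimal verification sketch, assuming the same double-SHA256 Hash(begin, end, begin, end) combiner used by that function; this helper is illustrative and not part of the changeset:

    #include <hash.h>
    #include <uint256.h>
    #include <vector>

    // Recompute the root from a leaf, its index, and the path returned by ComputeMerklePath.
    uint256 ComputeRootFromPath(uint256 leaf, uint32_t idx, const std::vector<uint256>& path)
    {
        uint256 h = leaf;
        for (const uint256& sibling : path) {
            if (idx & 1) // the running hash is the right-hand child at this level
                h = Hash(sibling.begin(), sibling.end(), h.begin(), h.end());
            else         // the running hash is the left-hand child at this level
                h = Hash(h.begin(), h.end(), sibling.begin(), sibling.end());
            idx >>= 1;
        }
        return h; // equals ComputeMerkleRoot(hashes) when the path is valid
    }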


@ -77,12 +77,6 @@ struct Params {
int nAllowMinDiffMinHeight;
int nAllowMinDiffMaxHeight;
int nNormalizedNameForkHeight;
int nMinTakeoverWorkaroundHeight;
int nMaxTakeoverWorkaroundHeight;
int nWitnessForkHeight;
int64_t nPowTargetSpacing;
int64_t nPowTargetTimespan;
/** how long it took claims to expire before the hard fork */
@ -96,8 +90,6 @@ struct Params {
nOriginalClaimExpirationTime :
nExtendedClaimExpirationTime;
}
/** blocks before the hard fork that adds all claims into the merkle hash */
int64_t nAllClaimsInMerkleForkHeight;
int64_t DifficultyAdjustmentInterval() const { return nPowTargetTimespan / nPowTargetSpacing; }
uint256 nMinimumChainWork;
uint256 defaultAssumeValid;


@ -154,8 +154,7 @@ int64_t GetTransactionSigOpCost(const CTransaction& tx, const CCoinsViewCache& i
const Coin& coin = inputs.AccessCoin(tx.vin[i].prevout);
assert(!coin.IsSpent());
const CTxOut &prevout = coin.out;
const CScript& scriptPubKey = StripClaimScriptPrefix(prevout.scriptPubKey);
nSigOps += CountWitnessSigOps(tx.vin[i].scriptSig, scriptPubKey, &tx.vin[i].scriptWitness, flags);
nSigOps += CountWitnessSigOps(tx.vin[i].scriptSig, prevout.scriptPubKey, &tx.vin[i].scriptWitness, flags);
}
return nSigOps;
}


@ -15,7 +15,6 @@
#include <util.h>
#include <utilmoneystr.h>
#include <utilstrencodings.h>
#include <nameclaim.h>
UniValue ValueFromAmount(const CAmount& amount)
{
@ -148,20 +147,12 @@ void ScriptToUniv(const CScript& script, UniValue& out, bool include_address)
out.pushKV("hex", HexStr(script.begin(), script.end()));
std::vector<std::vector<unsigned char>> solns;
txnouttype type; int claimOp;
auto stripped = StripClaimScriptPrefix(script, claimOp);
Solver(stripped, type, solns);
if (claimOp >= 0) {
out.pushKV("isclaim", UniValue(claimOp == OP_CLAIM_NAME || claimOp == OP_UPDATE_CLAIM));
out.pushKV("issupport", UniValue(claimOp == OP_SUPPORT_CLAIM));
out.pushKV("subtype", GetTxnOutputType(type));
out.pushKV("type", GetTxnOutputType(TX_NONSTANDARD)); // trying to keep backwards compatibility
}
else
out.pushKV("type", GetTxnOutputType(type)); // trying to keep backwards compatibility
txnouttype type;
Solver(script, type, solns);
out.pushKV("type", GetTxnOutputType(type));
CTxDestination address;
if (include_address && ExtractDestination(stripped, address)) {
if (include_address && ExtractDestination(script, address)) {
out.pushKV("address", EncodeDestination(address));
}
}
@ -177,28 +168,19 @@ void ScriptPubKeyToUniv(const CScript& scriptPubKey,
if (fIncludeHex)
out.pushKV("hex", HexStr(scriptPubKey.begin(), scriptPubKey.end()));
int claimOp;
auto stripped = StripClaimScriptPrefix(scriptPubKey, claimOp);
auto extracted = ExtractDestinations(stripped, type, addresses, nRequired);
if (extracted)
out.pushKV("reqSigs", nRequired);
if (claimOp >= 0) {
out.pushKV("isclaim", UniValue(claimOp == OP_CLAIM_NAME || claimOp == OP_UPDATE_CLAIM));
out.pushKV("issupport", UniValue(claimOp == OP_SUPPORT_CLAIM));
out.pushKV("subtype", GetTxnOutputType(type));
out.pushKV("type", GetTxnOutputType(TX_NONSTANDARD));
}
else
if (!ExtractDestinations(scriptPubKey, type, addresses, nRequired)) {
out.pushKV("type", GetTxnOutputType(type));
if (extracted) {
UniValue a(UniValue::VARR);
for (const CTxDestination &addr : addresses) {
a.push_back(EncodeDestination(addr));
}
out.pushKV("addresses", a);
return;
}
out.pushKV("reqSigs", nRequired);
out.pushKV("type", GetTxnOutputType(type));
UniValue a(UniValue::VARR);
for (const CTxDestination& addr : addresses) {
a.push_back(EncodeDestination(addr));
}
out.pushKV("addresses", a);
}
void TxToUniv(const CTransaction& tx, const uint256& hashBlock, UniValue& entry, bool include_hex, int serialize_flags)
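The practical effect of the claim-aware branch in ScriptPubKeyToUniv is that a decoded claim output keeps type pinned to nonstandard for backwards compatibility, while the real script class moves to subtype and isclaim/issupport flag which claim opcode was found. For a claim output wrapping an ordinary pay-to-pubkey-hash script, the decoded object would look roughly like this (field values are illustrative and the address is only a placeholder):

    "reqSigs": 1,
    "isclaim": true,
    "issupport": false,
    "subtype": "pubkeyhash",
    "type": "nonstandard",
    "addresses": [ "<payout address>" ]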

Some files were not shown because too many files have changed in this diff