Add driver for ClickHouse support

A driver for the ClickHouse DBMS was added.
It uses the phpClickHouse PHP library, which is bundled with this commit.
PascalWithopf 2018-10-04 12:14:59 +02:00
parent a9854e58ef
commit 719ea23b97
96 changed files with 11486 additions and 90 deletions
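The heart of the change is replacing the driver's mysqli_* calls with the bundled phpClickHouse client. Below is a minimal sketch of the new call pattern, assuming placeholder connection values and illustrative table/column names; the actual driver reads these from its LogStreamConfigClickHouse object, as the diffs below show.

```php
<?php
// Minimal sketch of the phpClickHouse calls the new driver relies on.
// Connection values are placeholders; table/column names are illustrative.
require_once 'classes/phpClickHouse/include.php';

$db = new ClickHouseDB\Client([
    'host'     => '127.0.0.1',
    'port'     => 8123,        // ClickHouse HTTP interface
    'username' => 'default',
    'password' => ''
]);
$db->database('Syslog');

// Replaces the old "SHOW TABLES LIKE ..." probe
if (!$db->isExists('Syslog', 'systemevents')) {
    die('table not found');
}

try {
    // select() replaces mysqli_query(); a Statement object replaces the result resource
    $st = $db->select('SELECT MAX(ID) AS MaxID FROM systemevents');
    $firstRow = $st->fetchOne();   // first row as an array, instead of mysqli_fetch_row()
    $allRows  = $st->rows();       // all rows, instead of a mysqli_fetch_array() loop
} catch (ClickHouseDB\Exception\QueryException $e) {
    die('query failed: ' . $e->getMessage());
}
```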

View File

@ -45,6 +45,7 @@ if ( !defined('IN_PHPLOGCON') )
// --- Required Includes!
require_once($gl_root_path . 'include/constants_errors.php');
require_once($gl_root_path . 'classes/phpClickHouse/include.php');
// ---
class LogStreamClickHouse extends LogStream {
@ -67,12 +68,6 @@ class LogStreamClickHouse extends LogStream {
public function __construct ($streamConfigObj) {
$this->_logStreamConfigObj = $streamConfigObj;
if ( $this->_logStreamConfigObj->DBType == DB_MYSQL )
{
// Probe if a function exists!
if ( !function_exists("mysqli_connect") )
DieWithFriendlyErrorMsg("Error, MYSQL Extensions are not enabled! Function 'mysqli_connect' does not exist.");
}
}
public function LogStreamClickHouse($streamConfigObj) {
self::__construct($streamConfigObj);
@ -88,7 +83,7 @@ class LogStreamClickHouse extends LogStream {
{
global $dbmapping;
// Initialise Basic stuff within the Classs
// Initialise Basic stuff within the Class
$this->RunBasicInits();
// Verify database connection (This also opens the database!)
@ -99,7 +94,7 @@ class LogStreamClickHouse extends LogStream {
// Copy the Property Array
$this->_arrProperties = $arrProperties;
// Check if DB Mapping exists
// Check if DB Mapping exists TODO: Default database field mapping?
if ( !isset($dbmapping[ $this->_logStreamConfigObj->DBTableType ]) )
return ERROR_DB_INVALIDDBMAPPING;
@ -131,8 +126,6 @@ class LogStreamClickHouse extends LogStream {
*/
public function Close()
{
if ($this->_dbhandle)
mysqli_close($this->_dbhandle);
$this->_dbhandle = null;
return SUCCESS;
}
@ -146,8 +139,15 @@ class LogStreamClickHouse extends LogStream {
// Try to connect to the database
if ( $this->_dbhandle == null )
{
// Forces to open a new link in all cases!
$this->_dbhandle = @mysqli_connect($this->_logStreamConfigObj->DBServer,$this->_logStreamConfigObj->DBUser,$this->_logStreamConfigObj->DBPassword);
// Create config
$config = [
'host' => $this->_logStreamConfigObj->DBServer,
'port' => $this->_logStreamConfigObj->DBPort,
'username' => $this->_logStreamConfigObj->DBUser,
'password' => $this->_logStreamConfigObj->DBPassword
];
// Open Connection
$this->_dbhandle = new ClickHouseDB\Client($config);
if (!$this->_dbhandle)
{
if ( isset($php_errormsg) )
@ -162,8 +162,7 @@ class LogStreamClickHouse extends LogStream {
}
// Select the database now!
$bRet = @mysqli_select_db($this->_dbhandle, $this->_logStreamConfigObj->DBName);
if(!$bRet)
if(!$this->_dbhandle->database($this->_logStreamConfigObj->DBName))
{
if ( isset($php_errormsg) )
{
@ -176,8 +175,8 @@ class LogStreamClickHouse extends LogStream {
}
// Check if the table exists!
$numTables = @mysqli_num_rows( mysqli_query($this->_dbhandle, "SHOW TABLES LIKE '%" . $this->_logStreamConfigObj->DBTableName . "%'"));
if( $numTables <= 0 )
$tables = $this->_dbhandle->isExists($this->_logStreamConfigObj->DBName, $this->_logStreamConfigObj->DBTableName);
if(!$tables)
return ERROR_DB_TABLENOTFOUND;
// reached this point means success ;)!
@ -686,15 +685,12 @@ class LogStreamClickHouse extends LogStream {
return $this->_firstPageUID;
$szSql = "SELECT MAX(" . $dbmapping[$szTableType]['DBMAPPINGS'][SYSLOG_UID] . ") FROM `" . $this->_logStreamConfigObj->DBTableName . "` " . $this->_SQLwhereClause;
$myQuery = mysqli_query($this->_dbhandle, $szSql);
$myQuery = $this->_dbhandle->select($szSql);
if ($myQuery)
{
// obtain first and only row
$myRow = mysqli_fetch_row($myQuery);
$this->_firstPageUID = $myRow[0];
$this->_firstPageUID = $myQuery->fetchOne();
// Free query now
mysqli_free_result ($myQuery);
// Increment for the Footer Stats
$querycount++;
@ -718,15 +714,15 @@ class LogStreamClickHouse extends LogStream {
return $this->_lastPageUID;
$szSql = "SELECT MIN(" . $dbmapping[$szTableType]['DBMAPPINGS'][SYSLOG_UID] . ") FROM `" . $this->_logStreamConfigObj->DBTableName . "` " . $this->_SQLwhereClause;
$myQuery = mysqli_query($this->_dbhandle, $szSql);
$myQuery = $this->_dbhandle->select($szSql);
if ($myQuery)
{
// obtain first and only row
$myRow = mysqli_fetch_row($myQuery);
$this->_lastPageUID = $myRow[0];
$this->_lastPageUID = $myQuery->fetchOne();
// Free query now
mysqli_free_result ($myQuery);
//mysqli_free_result ($myQuery);
// Increment for the Footer Stats
$querycount++;
@ -840,15 +836,14 @@ class LogStreamClickHouse extends LogStream {
{
// SHOW TABLE STATUS FROM
$szSql = "SELECT count(" . $dbmapping[$szTableType]['DBMAPPINGS'][SYSLOG_UID] . ") as Counter FROM `" . $this->_logStreamConfigObj->DBTableName . "`";
$myQuery = mysqli_query($this->_dbhandle, $szSql);
$myQuery = $this->_dbhandle->select($szSql);
if ($myQuery)
{
// Obtain RowCount!
$myRow = mysqli_fetch_row($myQuery);
$rowcount = $myRow[0];
$rowcount = $myQuery->fetchone();
// Free query now
mysqli_free_result ($myQuery);
//mysqli_free_result ($myQuery);
// Increment for the Footer Stats
$querycount++;
@ -1353,6 +1348,7 @@ class LogStreamClickHouse extends LogStream {
* This function expects the filters to already being set earlier.
* Otherwise no usual WHERE Clause can be created!
*/
// TODO: Create SQL Clause
private function CreateSQLWhereClause()
{
if ( $this->_filters != null )
@ -1602,7 +1598,7 @@ class LogStreamClickHouse extends LogStream {
if ( $this->_myDBQuery != null )
{
// Free Query ressources
mysqli_free_result ($this->_myDBQuery);
//mysqli_free_result ($this->_myDBQuery);
$this->_myDBQuery = null;
}
@ -1619,7 +1615,6 @@ class LogStreamClickHouse extends LogStream {
// Clear SQL Query first!
$this->DestroyMainSQLQuery();
// return error if there was one!
if ( ($res = $this->CreateMainSQLQuery($uID)) != SUCCESS )
return $res;
@ -1629,15 +1624,16 @@ class LogStreamClickHouse extends LogStream {
// Copy rows into the buffer!
$iBegin = $this->_currentRecordNum;
while ($myRow = mysqli_fetch_array($this->_myDBQuery, MYSQLI_ASSOC))
// Pascal: The individual records are fetched here; the while loop ensures that no
// records are read twice
while($iBegin < $this->_myDBQuery->countAll())
{
// Check if result was successfull!
if ( $myRow === FALSE || !$myRow )
break;
// Keys need to be converted into lowercase!
$this->bufferedRecords[$iBegin] = array_change_key_case($myRow, CASE_LOWER);
$iBegin++;
$rows = $this->_myDBQuery->rows();
foreach($rows as $myRow) {
$this->bufferedRecords[$iBegin] = array_change_key_case($myRow, CASE_LOWER);
$iBegin++;
}
}
// --- Check if results were found
@ -1679,38 +1675,19 @@ class LogStreamClickHouse extends LogStream {
// ---
// Perform Database Query
$this->_myDBQuery = mysqli_query($this->_dbhandle, $szSql);
if ( !$this->_myDBQuery )
{
// Check if a field is missing!
if ( mysqli_errno($this->_dbhandle) == 1054 )
{
// Handle missing field and try again!
if ( $this->HandleMissingField() == SUCCESS )
{
$this->_myDBQuery = mysqli_query($this->_dbhandle, $szSql);
if ( !$this->_myDBQuery ) {
$this->PrintDebugError("Invalid SQL: ".$szSql);
return ERROR_DB_QUERYFAILED;
}
}
else // Failed to add field dynamically
return ERROR_DB_QUERYFAILED;
}
else
{
$this->PrintDebugError("Invalid SQL: ".$szSql);
return ERROR_DB_QUERYFAILED;
}
try {
$this->_myDBQuery = $this->_dbhandle->select($szSql);
}
else
catch(ClickHouseDB\Exception\QueryException $E) {
$this->PrintDebugError("Error: " . $E->getMessage() . "\nOK\n");
return ERROR_DB_QUERYFAILED;
}
// Skip one entry in this case
if ( $this->_currentRecordStart > 0 )
{
// Skip one entry in this case
if ( $this->_currentRecordStart > 0 )
{
// Throw away
$myRow = mysqli_fetch_array($this->_myDBQuery, MYSQLI_ASSOC);
}
// Throw away
$myRow = $this->_myDBQuery->fetchOne();
}
// Increment for the Footer Stats
@ -1808,12 +1785,7 @@ class LogStreamClickHouse extends LogStream {
{
global $extraErrorDescription;
$errdesc = mysqli_error($this->_dbhandle);
$errno = mysqli_errno($this->_dbhandle);
$errormsg="$szErrorMsg <br>";
$errormsg.="Detail error: $errdesc <br>";
$errormsg.="Error Code: $errno <br>";
// Add to additional error output
$extraErrorDescription = $errormsg;
@ -1827,10 +1799,9 @@ class LogStreamClickHouse extends LogStream {
*/
private function GetRowCountByString($szQuery)
{
if ($myQuery = mysqli_query($this->_dbhandle, $szQuery))
if ($myQuery = $this->_dbhandle->select($szQuery))
{
$num_rows = mysqli_num_rows($myQuery);
mysqli_free_result ($myQuery);
$num_rows = $myQuery->count();
}
return $num_rows;
}
@ -1840,7 +1811,7 @@ class LogStreamClickHouse extends LogStream {
*/
private function GetRowCountByQueryID($myQuery)
{
$num_rows = mysqli_num_rows($myQuery);
$num_rows = $myQuery->count();
return $num_rows;
}
@ -1849,13 +1820,9 @@ class LogStreamClickHouse extends LogStream {
*/
private function GetRowCountFromTable()
{
if ( $myquery = mysqli_query($this->_dbhandle, "Select FOUND_ROWS();") )
if ( $myquery = $this->_dbhandle->select("Select FOUND_ROWS();") )
{
// Get first and only row!
$myRow = mysqli_fetch_array($myquery);
// copy row count
$numRows = $myRow[0];
$numRows = $myquery->count();
}
else
$numRows = -1;

View File

@ -40,11 +40,11 @@ if ( !defined('IN_PHPLOGCON') )
class LogStreamConfigClickHouse extends LogStreamConfig {
public $DBServer = '127.0.0.1';
public $DBPort = 3306;
public $DBPort = 8123;
public $DBName = '';
public $DBUser = '';
public $DBPassword = '';
public $DBType = DB_MYSQL; // Default = MYSQL!
public $DBType = DB_ClickHouse; // Default = ClickHouse!
public $DBTableType = 'winsyslog'; // Default = WINSYSLOG DB Layout!
public $DBTableName = 'systemevents'; // Default Tabelname from WINSYSLOG
public $DBEnableRowCounting = true; // Default RowCounting is enabled!
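For orientation, a hypothetical LogAnalyzer source definition using these defaults could look like the sketch below; the `$CFG['Sources']` key names are assumed from the existing MySQL source configuration and are not part of this commit.

```php
<?php
// Hypothetical config.php snippet for a ClickHouse source.
// Key names are assumed from LogAnalyzer's MySQL source config; only the
// DB_ClickHouse constant and the defaults above come from this commit.
$CFG['Sources']['Source1']['ID']          = 'Source1';
$CFG['Sources']['Source1']['Name']        = 'ClickHouse Syslog';
$CFG['Sources']['Source1']['SourceType']  = SOURCE_DB;
$CFG['Sources']['Source1']['DBType']      = DB_ClickHouse;
$CFG['Sources']['Source1']['DBServer']    = '127.0.0.1';
$CFG['Sources']['Source1']['DBPort']      = 8123;           // ClickHouse HTTP port (new default)
$CFG['Sources']['Source1']['DBName']      = 'Syslog';
$CFG['Sources']['Source1']['DBUser']      = 'default';
$CFG['Sources']['Source1']['DBPassword']  = '';
$CFG['Sources']['Source1']['DBTableType'] = 'winsyslog';    // default DB layout
$CFG['Sources']['Source1']['DBTableName'] = 'systemevents'; // default table name
```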

View File

@ -2470,4 +2470,4 @@ class LogStreamPDO extends LogStream {
// --- End of Class!
}
?>
?>

src/classes/phpClickHouse/.gitignore vendored Normal file
View File

@ -0,0 +1,7 @@
/.phpcs-cache
/phpcs.xml
/phpstan.neon
/phpunit.xml
composer.lock
vendor/
var/

View File

@ -0,0 +1,33 @@
build:
nodes:
analysis:
environment:
php:
version: 7.1
cache:
disabled: false
directories:
- ~/.composer/cache
project_setup:
override: true
tests:
override:
- php-scrutinizer-run
- phpcs-run
dependencies:
override:
- composer install --no-interaction --prefer-dist
checks:
php:
code_rating: true
tools:
external_code_coverage: true
build_failure_conditions:
- 'elements.rating(<= C).new.exists' # No new classes/methods with a rating of C or worse allowed
- 'issues.severity(>= MAJOR).new.exists' # New issues of major or higher severity
- 'project.metric_change("scrutinizer.test_coverage", < 0)' # Code Coverage decreased from previous inspection
- 'patches.label("Unused Use Statements").new.exists' # No new unused imports patches allowed

View File

@ -0,0 +1,93 @@
dist: trusty
language: php
sudo: false
cache:
directories:
- $HOME/.composer/cache
php:
- 7.1
- 7.2
- nightly
services:
- docker
before_install:
- docker-compose -f tests/docker-compose.yaml up -d
- mv ~/.phpenv/versions/$(phpenv version-name)/etc/conf.d/xdebug.ini{,.disabled} || echo "xdebug not available"
install:
- travis_retry composer update -n --prefer-dist
script: ./vendor/bin/phpunit
jobs:
allow_failures:
- php: nightly
- env: DEV_DEPENDENCIES
include:
- stage: Test
env: LOWEST_DEPENDENCIES
install:
- travis_retry composer update -n --prefer-dist --prefer-lowest
- stage: Test
env: LOWEST_DEPENDENCIES
php: 7.2
install:
- travis_retry composer update -n --prefer-dist --prefer-lowest
- stage: Test
env: LOWEST_DEPENDENCIES
php: nightly
install:
- travis_retry composer update -n --prefer-dist --prefer-lowest
- stage: Test
env: DEV_DEPENDENCIES
php: nightly
install:
- composer config minimum-stability dev
- travis_retry composer update -n --prefer-dist
- stage: Test
env: COVERAGE
php: 7.1
before_script:
- mv ~/.phpenv/versions/$(phpenv version-name)/etc/conf.d/xdebug.ini{.disabled,}
- if [[ ! $(php -m | grep -si xdebug) ]]; then echo "xdebug required for coverage"; exit 1; fi
script:
- ./vendor/bin/phpunit --coverage-clover ./build/logs/clover.xml
after_script:
- wget https://github.com/scrutinizer-ci/ocular/releases/download/1.5.2/ocular.phar
- php ocular.phar code-coverage:upload --format=php-clover build/logs/clover.xml
- stage: Code Quality
if: type = pull_request
env: PULL_REQUEST_CODING_STANDARD
php: 7.1
install: travis_retry composer install --prefer-dist
script:
- |
if [ $TRAVIS_BRANCH != "master" ]; then
git remote set-branches --add origin $TRAVIS_BRANCH;
git fetch origin $TRAVIS_BRANCH;
fi
- git merge-base origin/$TRAVIS_BRANCH $TRAVIS_PULL_REQUEST_SHA || git fetch origin +refs/pull/$TRAVIS_PULL_REQUEST/merge --unshallow
- wget https://github.com/diff-sniffer/git/releases/download/0.1.0/git-phpcs.phar
- php git-phpcs.phar origin/$TRAVIS_BRANCH...$TRAVIS_PULL_REQUEST_SHA
# - stage: Code Quality
# if: NOT type = pull_request
# env: CODING_STANDARD
# php: 7.1
# install: travis_retry composer install --prefer-dist
# script:
# - ./vendor/bin/phpcs
- stage: Code Quality
env: STATIC_ANALYSIS
script: ./vendor/bin/phpstan analyse

View File

@ -0,0 +1,204 @@
PHP ClickHouse wrapper - Changelog
======================
### 2018-09-25 [Release 1.3.1]
* Pull request #94 from simPod: Uint64 values
* Pull request #95 from simPod: Bump to php 7.1
### 2018-09-11 [Release 1.2.4]
* Fix #91 ,Does not work inserting with the database name in the table
* pull request #90 from simPod: Refactor partitions()
### 2018-08-30 [Release 1.2.3]
* Escape values in arrays, pull request #87 from simPod/fix-escape
* fix-bindings: pull request #84 from simPod/fix-bindings
* Added quotes around table and column names in the insert wrapper.
* Docker Compose in tests
### 2018-07-24 [Release 1.2.2]
* Connection without [port](https://github.com/smi2/phpClickHouse#connection-without-port)
### 2018-07-16 [Release 1.2.1]
* New `$client->getServerVersion()`
* Rewrite method `$client->ping()`
* Fix `include.php` - ClickHouseException before exceptions
* Add CHANGELOG.md
* New `interface ClickHouseException`
### 2018-07-06 [Release 1.2.0]
* Fix `SelectAsync() & executeAsync()`, some task freeze
### 2018-07-04 [Release 1.1.2]
* Republic 1.1.1
### 2018-07-02 [Release 1.1.1]
* #47 Bindings wrong work - fix
### 2018-07-02 [Release 1.1.0]
New:
* `$client->getServerUptime()` Returns the server's uptime in seconds.
* `$client->getServerSystemSettings()` Read system.settings table and return array
* `$client->streamWrite()` function
* `$client->streamRead()` function
Warning:
* `HttpCompression` is now enabled by default
* Deprecated `StreamInsert` class
Fix:
* Fix `rawData()` result in `JSONCompact & JSONEachRow` format
* Fix Statement - unnecessary memory usage
* Fix support php5.6
### 2018-06-29 [Release 1.0.1]
* Do not convert int parameters in array to string in Bindings [pull 67](https://github.com/smi2/phpClickHouse/pull/67)
### 2018-06-25 [Release 1.0.0]
* Use Semantic versioning
### 2018-06-22
* Fix `tableSize('name')` and `tablesSize()`
### 2018-06-19
* Add DataTime Interface for Bind
* Fix phpDoc
* `Composer->require->"php": ">=5.6"`
### 2018-05-09
* Move `\ClickHouseDB\WhereInFile` to `\ClickHouseDB\Query\WhereInFile`
* Move `\ClickHouseDB\QueryException` to `\ClickHouseDB\Exception\QueryException`
* Move `\ClickHouseDB\DatabaseException` to `ClickHouseDB\Exception\DatabaseException`
* Move `\ClickHouseDB\FormatLine` to `\ClickHouseDB\Quote\FormatLine`
* Move `\ClickHouseDB\WriteToFile` to `ClickHouseDB\Query\WriteToFile`
* Move `\Curler\Request` to `\ClickHouseDB\Transport\CurlerRequest`
* Move `\Curler\CurlerRolling` to `\ClickHouseDB\Transport\CurlerRolling`
* Up to php 7.2 & phpunit 7.1 for Dev & Prs4 Autoloading
### 2018-03-26
* Fix StreamInsert : one stream work faster and safe than loop #PR43
* Fix cluster->clientLike()
### 2017-12-28
* Fix `FORMAT JSON` if set FORMAT in sql
* GetRaw() - result raw response if not json ``SELECT number as format_id FROM system.numbers LIMIT 3 FORMAT CSVWithNames``
### 2017-12-22
* progressFunction()
* Escape values
### 2017-12-12
* Not set `FORMAT JSON` if set FORMAT in sql
### 2017-11-22
- Add insertAssocBulk
### 2017-08-25
- Fix tablesSize(), use database filter
- Fix partitions(), use database filter
### 2017-08-14
- Add session_id support
### 2017-02-20
- Build composer 0.17.02
### 2016-12-09
- For ReadOnly users you need to set `$client->setReadOnlyUser(true);` or `$config['readonly']`, see exam19_readonly_user.php
### 2016-11-25
- `client->truncateTable('tableName')`
- `cluster->getMasterNodeForTable('dbName.tableName') // node have is_leader=1`
- `cluster->getSizeTable('dbName.tableName')`
- `cluster->getTables()`
- `cluster->truncateTable('dbName.tableName')`
- See example cluster_06_truncate_table.php
### 2016-11-24
- add `cluster->setSoftCheck()`
- insertBatchFiles() support `$file_names` - string or array , `$columns_array` - array or null
- add insertBatchStream() return `\Curler\Request` no exec
- writeStreamData() return `\Curler\Request`
- fix httpCompression(false)
- getHeaders() as array from `\Curler\Request`
- `setReadFunction( function() )` in `Request`
- Add class StreamInsert, direct read from stream_resource to clickhouse:stream
### 2016-11-04
- add `$db->insertBatchTSVFiles()`,
- add format param in `$db->insertBatchFiles(,,,format)`,
- deprecated class CSV
- Add static class `\ClickHouseDB\FormatLine:CSV(),\ClickHouseDB\FormatLine:TSV(),\ClickHouseDB\FormatLine:Insert()`
- CSV RFC4180 - `\ClickHouseDB\FormatLine::CSV(Array))."\n"`
- Update exam12_array.php + unit tests
### 2016-11-03
- `$db->enableLogQueries(true)` - write to system.query_log
- `$db->enableExtremes(true);` - default extremes now, disabled
- `$db->isExists($database,$table)`
### 2016-10-27
- add Connect timeout , $db->setConnectTimeOut(5);
- change default ConnectTimeOut = 5 seconds. before 1 sec.
- change DNS_CACHE default to 120 seconds
### 2016-10-25 Release 0.16.10
- fix timeout error and add test
### 2016-10-23
- client->setTimeout($seconds)
- cluster->clientLike($cluster,$ip_addr_like)
- Delete all migration code from driver, move to https://github.com/smi2/phpMigrationsClickhouse
### 2016-09-20 Release 0.16.09
- Version/Release names: [ zero dot year dot month]
- Support cluster: new class Cluster and ClusterQuery
- output_format_write_statistics, for clickhouse version > v1.1.54019-stable
- WriteToFile in select,selectAsync
- Degeneration for Bindings & Conditions
- $db->select(new Query("Select..."));
- remove findActiveHostAndCheckCluster , clusterHosts , checkServerReplicas
- Add cleanQueryDegeneration(),addQueryDegeneration()
- Need $db->enableQueryConditions(); for use Conditions ; default Conditions - disabled;
- float in CurlerRequest->timeOut(2.5) = 2500 ms
- tablesSize() - add `sizebytes`
### 2016-08-11 Release 0.2.0
- exception on error write
### 2016-08-06 Release 0.1.0
- init

View File

@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2011-2016 Smi2, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

View File

@ -0,0 +1,932 @@
PHP ClickHouse wrapper
======================
[![Build Status](https://travis-ci.org/smi2/phpClickHouse.svg)](https://travis-ci.org/smi2/phpClickHouse)
[![Downloads](https://poser.pugx.org/smi2/phpClickHouse/d/total.svg)](https://packagist.org/packages/smi2/phpClickHouse)
[![Packagist](https://poser.pugx.org/smi2/phpClickHouse/v/stable.svg)](https://packagist.org/packages/smi2/phpClickHouse)
[![Licence](https://poser.pugx.org/smi2/phpClickHouse/license.svg)](https://packagist.org/packages/smi2/phpClickHouse)
[![Quality Score](https://scrutinizer-ci.com/g/smi2/phpClickHouse/badges/quality-score.png?b=master)](https://scrutinizer-ci.com/g/smi2/phpClickHouse)
[![Code Coverage](https://scrutinizer-ci.com/g/smi2/phpClickHouse/badges/coverage.png?b=master)](https://scrutinizer-ci.com/g/smi2/phpClickHouse)
## Features
- No dependencies, only cURL (supports PHP `>=7.1`)
- Select parallel queries (asynchronous)
- Asynchronous bulk inserts from CSV file
- Http compression (Gzip), for bulk inserts
- Find active host, check cluster
- Select WHERE IN ( _local csv file_ )
- SQL conditions & template
- tablesSize & databaseSize
- listPartitions
- truncateTable in cluster
- Insert array as column
- Get master node replica in cluster
- Get tableSize in all nodes
- Async get ClickHouse progress function
- streamRead/Write & Closure functions
[Russian articles habr.com 1](https://habrahabr.ru/company/smi2/blog/317682/) [on habr.com 2](https://habr.com/company/smi2/blog/314558/)
## Install composer
```
composer require smi2/phpclickhouse
```
In php
```php
// vendor autoload
$db = new ClickHouseDB\Client(['config_array']);
$db->ping();
```
Last stable version for:
- php 5.6 = `1.1.2`
- php 7.0 = `1.2.4`
[Packagist](https://packagist.org/packages/smi2/phpclickhouse)
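If you have to stay on one of those PHP versions, pin the matching release explicitly (a sketch; version numbers are taken from the list above):
```
composer require smi2/phpclickhouse:1.1.2   # last release for PHP 5.6
composer require smi2/phpclickhouse:1.2.4   # last release for PHP 7.0
```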
## Start
Connect and select database:
```php
$config = [
'host' => '192.168.1.1',
'port' => '8123',
'username' => 'default',
'password' => ''
];
$db = new ClickHouseDB\Client($config);
$db->database('default');
$db->setTimeout(1.5); // 1500 ms
$db->setTimeout(10); // 10 seconds
$db->setConnectTimeOut(5); // 5 seconds
```
Show tables:
```php
print_r($db->showTables());
```
Create table:
```php
$db->write('
CREATE TABLE IF NOT EXISTS summing_url_views (
event_date Date DEFAULT toDate(event_time),
event_time DateTime,
site_id Int32,
site_key String,
views Int32,
v_00 Int32,
v_55 Int32
)
ENGINE = SummingMergeTree(event_date, (site_id, site_key, event_time, event_date), 8192)
');
```
Show create table:
```php
echo $db->showCreateTable('summing_url_views');
```
Insert data:
```php
$stat = $db->insert('summing_url_views',
[
[time(), 'HASH1', 2345, 22, 20, 2],
[time(), 'HASH2', 2345, 12, 9, 3],
[time(), 'HASH3', 5345, 33, 33, 0],
[time(), 'HASH3', 5345, 55, 0, 55],
],
['event_time', 'site_key', 'site_id', 'views', 'v_00', 'v_55']
);
```
If you need to insert a UInt64 value, you can wrap it in the `ClickHouseDB\Type\UInt64` DTO.
```php
$statement = $db->insert('table_name',
[
[time(), UInt64::fromString('18446744073709551615')],
],
['event_time', 'uint64_type_column']
);
UInt64::fromString('18446744073709551615')
```
Select:
```php
$statement = $db->select('SELECT * FROM summing_url_views LIMIT 2');
```
Work with Statement:
```php
// Count select rows
$statement->count();
// Count all rows
$statement->countAll();
// fetch one row
$statement->fetchOne();
// get extremes min
print_r($statement->extremesMin());
// totals row
print_r($statement->totals());
// result all
print_r($statement->rows());
// totalTimeRequest
print_r($statement->totalTimeRequest());
// raw answer JsonDecode array, for economy memory
print_r($statement->rawData());
// raw curl_info answer
print_r($statement->responseInfo());
// human size info
print_r($statement->info());
// if clickhouse-server version >= 54011
$db->settings()->set('output_format_write_statistics',true);
print_r($statement->statistics());
```
Select result as tree:
```php
$statement = $db->select('
SELECT event_date, site_key, sum(views), avg(views)
FROM summing_url_views
WHERE site_id < 3333
GROUP BY event_date, url_hash
WITH TOTALS
');
print_r($statement->rowsAsTree('event_date.site_key'));
/*
(
[2016-07-18] => Array
(
[HASH2] => Array
(
[event_date] => 2016-07-18
[url_hash] => HASH2
[sum(views)] => 12
[avg(views)] => 12
)
[HASH1] => Array
(
[event_date] => 2016-07-18
[url_hash] => HASH1
[sum(views)] => 22
[avg(views)] => 22
)
)
)
*/
```
Drop table:
```php
$db->write('DROP TABLE IF EXISTS summing_url_views');
```
Features
--------
### Select parallel queries (asynchronous)
```php
$state1 = $db->selectAsync('SELECT 1 as ping');
$state2 = $db->selectAsync('SELECT 2 as ping');
// run
$db->executeAsync();
// result
print_r($state1->rows());
print_r($state2->fetchOne('ping'));
```
### Parallelizing massive inserts from CSV file
```php
$file_data_names = [
'/tmp/clickHouseDB_test.1.data',
'/tmp/clickHouseDB_test.2.data',
'/tmp/clickHouseDB_test.3.data',
'/tmp/clickHouseDB_test.4.data',
'/tmp/clickHouseDB_test.5.data',
];
// insert all files
$stat = $db->insertBatchFiles(
'summing_url_views',
$file_data_names,
['event_time', 'site_key', 'site_id', 'views', 'v_00', 'v_55']
);
```
### Parallelizing errors
selectAsync without executeAsync
```php
$select = $db->selectAsync('SELECT * FROM summing_url_views LIMIT 1');
$insert = $db->insertBatchFiles('summing_url_views', ['/tmp/clickHouseDB_test.1.data'], ['event_time']);
// 'Exception' with message 'Queue must be empty, before insertBatch, need executeAsync'
```
see example/exam5_error_async.php
### Gzip & enable_http_compression
Reads the CSV files on the fly and compresses them with zlib.deflate.
```php
$db->settings()->max_execution_time(200);
$db->enableHttpCompression(true);
$result_insert = $db->insertBatchFiles('summing_url_views', $file_data_names, [...]);
foreach ($result_insert as $fileName => $state) {
echo $fileName . ' => ' . json_encode($state->info_upload()) . PHP_EOL;
}
```
see speed test `example/exam08_http_gzip_batch_insert.php`
### Max execution time
```php
$db->settings()->max_execution_time(200); // second
```
### Connection without port
```php
$config['host']='blabla.com';
$config['port']=0;
// getUri() === 'http://blabla.com'
$config['host']='blabla.com/urls';
$config['port']=8765;
// getUri() === 'http://blabla.com/urls'
$config['host']='blabla.com:2224';
$config['port']=1234;
// getUri() === 'http://blabla.com:2224'
```
### tablesSize & databaseSize
Result in _human size_
```php
print_r($db->databaseSize());
print_r($db->tablesSize());
print_r($db->tableSize('summing_partions_views'));
```
### Partitions
```php
$count_result = 2;
print_r($db->partitions('summing_partions_views', $count_result));
```
Drop partitions (pre-production):
```php
$count_old_days = 10;
print_r($db->dropOldPartitions('summing_partions_views', $count_old_days));
// by `partition_id`
print_r($db->dropPartition('summing_partions_views', '201512'));
```
### Select WHERE IN ( _local csv file_ )
```php
$file_name_data1 = '/tmp/temp_csv.txt'; // two column file [int,string]
$whereIn = new \ClickHouseDB\Query\WhereInFile();
$whereIn->attachFile($file_name_data1, 'namex', ['site_id' => 'Int32', 'site_hash' => 'String'], \ClickHouseDB\Query\WhereInFile::FORMAT_CSV);
$result = $db->select($sql, [], $whereIn);
// see example/exam7_where_in.php
```
### Bindings
Bindings:
```php
$date1 = new DateTime("now"); // DateTimeInterface
$Bindings = [
'select_date' => ['2000-10-10', '2000-10-11', '2000-10-12'],
'datetime'=>$date1,
'limit' => 5,
'from_table' => 'table'
];
$statement = $db->selectAsync("SELECT FROM {table} WHERE datetime=:datetime limit {limit}", $Bindings);
// Double bind in {KEY}
$keys=[
'A'=>'{B}',
'B'=>':C',
'C'=>123,
'Z'=>[':C',':B',':C']
];
$this->client->selectAsync('{A} :Z', $keys)->sql() // == "123 ':C',':B',':C' FORMAT JSON",
```
#### Simple sql conditions & template
Conditions are deprecated; if you need them, enable with:
`$db->enableQueryConditions();`
Example with QueryConditions:
```php
$db->enableQueryConditions();
$input_params = [
'select_date' => ['2000-10-10', '2000-10-11', '2000-10-12'],
'limit' => 5,
'from_table' => 'table'
];
$select = '
SELECT * FROM {from_table}
WHERE
{if select_date}
event_date IN (:select_date)
{else}
event_date=today()
{/if}
{if limit}
LIMIT {limit}
{/if}
';
$statement = $db->selectAsync($select, $input_params);
echo $statement->sql();
/*
SELECT * FROM table
WHERE
event_date IN ('2000-10-10','2000-10-11','2000-10-12')
LIMIT 5
FORMAT JSON
*/
$input_params['select_date'] = false;
$statement = $db->selectAsync($select, $input_params);
echo $statement->sql();
/*
SELECT * FROM table
WHERE
event_date=today()
LIMIT 5
FORMAT JSON
*/
$state1 = $db->selectAsync(
'SELECT 1 as {key} WHERE {key} = :value',
['key' => 'ping', 'value' => 1]
);
// SELECT 1 as ping WHERE ping = "1"
```
Example custom query Degeneration in `exam16_custom_degeneration.php`
```
SELECT {ifint VAR} result_if_intval_NON_ZERO{/if}
SELECT {ifint VAR} result_if_intval_NON_ZERO {else} BLA BLA{/if}
```
### Settings
There are three ways to set settings:
```php
// in array config
$config = [
'host' => 'x',
'port' => '8123',
'username' => 'x',
'password' => 'x',
'settings' => ['max_execution_time' => 100]
];
$db = new ClickHouseDB\Client($config);
// settings via constructor
$config = [
'host' => 'x',
'port' => '8123',
'username' => 'x',
'password' => 'x'
];
$db = new ClickHouseDB\Client($config, ['max_execution_time' => 100]);
// set method
$config = [
'host' => 'x',
'port' => '8123',
'username' => 'x',
'password' => 'x'
];
$db = new ClickHouseDB\Client($config);
$db->settings()->set('max_execution_time', 100);
// apply array method
$db->settings()->apply([
'max_execution_time' => 100,
'max_block_size' => 12345
]);
// check
if ($db->settings()->getSetting('max_execution_time') !== 100) {
throw new Exception('Bad work settings');
}
// see example/exam10_settings.php
```
### Use session_id with ClickHouse
`useSession()` creates a new session_id; `useSession($value)` reuses an existing one.
```php
// enable session_id
$db->useSession();
$sesion_AA=$db->getSession(); // return session_id
$db->write(' CREATE TEMPORARY TABLE IF NOT EXISTS temp_session_test (number UInt64)');
$db->write(' INSERT INTO temp_session_test SELECT number*1234 FROM system.numbers LIMIT 30');
// reconnect to continue with other session
$db->useSession($sesion_AA);
```
### Array as column
```php
$db->write('
CREATE TABLE IF NOT EXISTS arrays_test_string (
s_key String,
s_arr Array(String)
)
ENGINE = Memory
');
$db->insert('arrays_test_string',
[
['HASH1', ["a", "dddd", "xxx"]],
['HASH1', ["b'\tx"]],
],
['s_key', 's_arr']
);
// see example/exam12_array.php
```
Class for FormatLine array
```php
var_dump(
\ClickHouseDB\FormatLine::CSV(
['HASH1', ["a", "dddd", "xxx"]]
)
);
var_dump(
\ClickHouseDB\FormatLine::TSV(
['HASH1', ["a", "dddd", "xxx"]]
)
);
// example write to file
$row=['event_time'=>date('Y-m-d H:i:s'),'arr1'=>[1,2,3],'arrs'=>["A","B\nD\nC"]];
file_put_contents($fileName,\ClickHouseDB\FormatLine::TSV($row)."\n",FILE_APPEND);
```
### Cluster drop old Partitions
Example code :
```php
class my
{
/**
* @return \ClickHouseDB\Cluster
*/
public function getClickHouseCluster()
{
return $this->_cluster;
}
public function msg($text)
{
echo $text."\n";
}
private function cleanTable($dbt)
{
$sizes=$this->getClickHouseCluster()->getSizeTable($dbt);
$this->msg("Clean table : $dbt,size = ".$this->humanFileSize($sizes));
// split string "DB.TABLE"
list($db,$table)=explode('.',$dbt);
// Get Master node for table
$nodes=$this->getClickHouseCluster()->getMasterNodeForTable($dbt);
foreach ($nodes as $node)
{
$client=$this->getClickHouseCluster()->client($node);
$size=$client->database($db)->tableSize($table);
$this->msg("$node \t {$size['size']} \t {$size['min_date']} \t {$size['max_date']}");
$client->dropOldPartitions($table,30,30);
}
}
public function clean()
{
$this->msg("clean");
$this->getClickHouseCluster()->setScanTimeOut(2.5); // 2500 ms
$this->getClickHouseCluster()->setSoftCheck(true);
if (!$this->getClickHouseCluster()->isReplicasIsOk())
{
throw new Exception('Replica state is bad , error='.$this->getClickHouseCluster()->getError());
}
$this->cleanTable('model.history_full_model_sharded');
$this->cleanTable('model.history_model_result_sharded');
}
}
```
### HTTPS
```php
$db = new ClickHouseDB\Client($config);
$db->settings()->https();
```
### getServer System.Settings & Uptime
```php
print_r($db->getServerUptime());
print_r($db->getServerSystemSettings());
print_r($db->getServerSystemSettings('merge_tree_min_rows_for_concurrent_read'));
```
### ReadOnly ClickHouse user
```php
$config = [
'host' => '192.168.1.20',
'port' => '8123',
'username' => 'ro',
'password' => 'ro',
'readonly' => true
];
```
### Direct write to file
Send the result from ClickHouse without parsing the JSON.
```php
$WriteToFile=new ClickHouseDB\WriteToFile('/tmp/_1_select.csv');
$db->select('select * from summing_url_views',[],null,$WriteToFile);
// or
$db->selectAsync('select * from summing_url_views limit 4',[],null,new ClickHouseDB\WriteToFile('/tmp/_3_select.tab',true,'TabSeparatedWithNames'));
$db->selectAsync('select * from summing_url_views limit 4',[],null,new ClickHouseDB\WriteToFile('/tmp/_4_select.tab',true,'TabSeparated'));
$statement=$db->selectAsync('select * from summing_url_views limit 54',[],null,new ClickHouseDB\WriteToFile('/tmp/_5_select.csv',true,ClickHouseDB\WriteToFile::FORMAT_CSV));
```
## Stream
streamWrite() : Closure stream write
```php
$streamWrite=new ClickHouseDB\Transport\StreamWrite($stream);
$client->streamWrite(
$streamWrite, // StreamWrite Class
'INSERT INTO {table_name} FORMAT JSONEachRow', // SQL Query
['table_name'=>'_phpCh_SteamTest'] // Binds
);
```
### streamWrite & custom Closure & Deflate
```php
$stream = fopen('php://memory','r+');
for($f=0;$f<23;$f++) { // Make json data in stream
fwrite($stream, json_encode(['a'=>$f]).PHP_EOL );
}
rewind($stream); // rewind stream
$streamWrite=new ClickHouseDB\Transport\StreamWrite($stream);
$streamWrite->applyGzip(); // Add Gzip zlib.deflate in stream
$callable = function ($ch, $fd, $length) use ($stream) {
return ($line = fread($stream, $length)) ? $line : '';
};
// Apply closure
$streamWrite->closure($callable);
// Run Query
$r=$client->streamWrite($streamWrite,'INSERT INTO {table_name} FORMAT JSONEachRow', ['table_name'=>'_phpCh_SteamTest']);
// Result
print_r($r->info_upload());
```
### streamRead
streamRead is like `WriteToFile`
```php
$stream = fopen('php://memory','r+');
$streamRead=new ClickHouseDB\Transport\StreamRead($stream);
$r=$client->streamRead($streamRead,'SELECT sin(number) as sin,cos(number) as cos FROM {table_name} LIMIT 4 FORMAT JSONEachRow', ['table_name'=>'system.numbers']);
rewind($stream);
while (($buffer = fgets($stream, 4096)) !== false) {
echo ">>> ".$buffer;
}
fclose($stream); // Need Close Stream
// Send to closure
$stream = fopen('php://memory','r+');
$streamRead=new ClickHouseDB\Transport\StreamRead($stream);
$callable = function ($ch, $string) use ($stream) {
// some magic for _BLOCK_ data
fwrite($stream, str_ireplace('"sin"','"max"',$string));
return strlen($string);
};
$streamRead->closure($callable);
$r=$client->streamRead($streamRead,'SELECT sin(number) as sin,cos(number) as cos FROM {table_name} LIMIT 44 FORMAT JSONEachRow', ['table_name'=>'system.numbers']);
```
### insert Assoc Bulk
```php
$oneRow = [
'one' => 1,
'two' => 2,
'thr' => 3,
];
$failRow = [
'two' => 2,
'one' => 1,
'thr' => 3,
];
$db->insertAssocBulk([$oneRow, $oneRow, $failRow])
```
### progressFunction
```php
// Apply function
$db->progressFunction(function ($data) {
echo "CALL FUNCTION:".json_encode($data)."\n";
});
$st=$db->select('SELECT number,sleep(0.2) FROM system.numbers limit 5');
// Print
// ...
// CALL FUNCTION:{"read_rows":"2","read_bytes":"16","total_rows":"0"}
// CALL FUNCTION:{"read_rows":"3","read_bytes":"24","total_rows":"0"}
// ...
```
### Cluster
```php
$config = [
'host' => 'cluster.clickhouse.dns.com', // any node name in cluster
'port' => '8123',
'username' => 'default', // all node have one login+password
'password' => ''
];
// the client connects to the first node by DNS, reads the IP list, then connects to ALL nodes to check that they are OK
$cl = new ClickHouseDB\Cluster($config);
$cl->setScanTimeOut(2.5); // 2500 ms, max time connect per one node
// Check replica state is OK
if (!$cl->isReplicasIsOk())
{
throw new Exception('Replica state is bad , error='.$cl->getError());
}
// get array of nodes and clusters
print_r($cl->getNodes());
print_r($cl->getClusterList());
// get node by cluster
$name='some_cluster_name';
print_r($cl->getClusterNodes($name));
// get counts
echo "> Count Shard = ".$cl->getClusterCountShard($name)."\n";
echo "> Count Replica = ".$cl->getClusterCountReplica($name)."\n";
// get nodes by table & print size per node
$nodes=$cl->getNodesByTable('shara.adpreview_body_views_sharded');
foreach ($nodes as $node)
{
echo "$node > \n";
// select one node
print_r($cl->client($node)->tableSize('adpreview_body_views_sharded'));
print_r($cl->client($node)->showCreateTable('shara.adpreview_body_views'));
}
// work with one node
// select by IP like "*.248*" = `123.123.123.248`, delimiter `;`; if none is found, the first node is selected
$cli=$cl->clientLike($name,'.298;.964'); // first find .298 then .964 , result is ClickHouseDB\Client
$cli->ping();
// truncate table on cluster
$result=$cl->truncateTable('dbNane.TableName_sharded');
// get one active node ( random )
$cl->activeClient()->setTimeout(0.01);
$cl->activeClient()->write("DROP TABLE IF EXISTS default.asdasdasd ON CLUSTER cluster2");
// find `is_leader` node
$cl->getMasterNodeForTable('dbNane.TableName_sharded');
// errors
var_dump($cl->getError());
//
```
### Return Extremes
```php
$db->enableExtremes(true);
```
### Enable Log Query
You can log all queries in ClickHouse:
```php
$db->enableLogQueries();
$db->select('SELECT 1 as p');
print_r($db->select('SELECT * FROM system.query_log')->rows());
```
### isExists
```php
$db->isExists($database,$table);
```
### Debug & Verbose
```php
$db->verbose();
```
### Dev & PHPUnit Test
* Don't forget to run `composer install`; it sets up PSR-4 autoloading.
* Then create and edit your PHPUnit config before running `vendor/bin/phpunit`:
```bash
cp phpunit.xml.dist phpunit.xml
mcedit phpunit.xml
```
Edit in phpunit.xml constants:
```xml
<php>
<env name="CLICKHOUSE_HOST" value="127.0.0.1" />
<env name="CLICKHOUSE_PORT" value="8123" />
<env name="CLICKHOUSE_USER" value="default" />
<env name="CLICKHOUSE_DATABASE" value="phpChTestDefault" />
<env name="CLICKHOUSE_PASSWORD" value="" />
<env name="CLICKHOUSE_TMPPATH" value="/tmp" />
</php>
```
Run test
```bash
./vendor/bin/phpunit
./vendor/bin/phpunit --group ClientTest
```
Run PHPStan
```
# Main
./vendor/bin/phpstan analyse src tests --level 7
# SRC only
./vendor/bin/phpstan analyse src --level 7
# Examples
./vendor/bin/phpstan analyse example -a ./example/Helper.php
```
License
-------
MIT
ChangeLog
---------
See [changeLog.md](CHANGELOG.md)

View File

@ -0,0 +1,36 @@
{
"name": "smi2/phpclickhouse",
"type": "library",
"description": "PHP ClickHouse Client",
"keywords": ["clickhouse", "driver", "client", "curl", "http", "HTTP client", "php"],
"homepage": "https://github.com/smi2/phpClickHouse",
"license": "MIT",
"authors": [
{
"name": "Igor Strykhar",
"email": "isublimity@gmail.com",
"homepage": "https://github.com/isublimity"
}
],
"require": {
"php": "^7.1",
"ext-curl": "*"
},
"require-dev": {
"doctrine/coding-standard": "^5.0",
"phpstan/phpstan": "^0.10.3",
"phpunit/phpunit": "^7",
"sebastian/comparator": "~3.0"
},
"autoload": {
"psr-4": {
"ClickHouseDB\\": "src/"
}
},
"autoload-dev": {
"psr-4": {
"ClickHouseDB\\Tests\\": "tests/",
"ClickHouseDB\\Example\\": "example/"
}
}
}

View File

@ -0,0 +1,7 @@
<?php
return [
'host' => 'tabix.dev7', // your host name
'port' => '8123',
'username' => 'default',
'password' => ''
];

View File

@ -0,0 +1,196 @@
<?php
namespace ClickHouseDB\Example;
class Helper
{
public static function init()
{
date_default_timezone_set('Europe/Moscow');
error_reporting( E_ALL );
ini_set('display_errors',1);
}
/**
* @param $file_name
* @param int $from_id
* @param int $to_id
*/
public static function makeListSitesKeysDataFile($file_name, $from_id = 1000, $to_id = 20000)
{
@unlink($file_name);
$handle = fopen($file_name, 'w');
$rows = 0;
for ($f = $from_id; $f < $to_id; $f++) {
$j['site_id'] = $f;
$j['site_hash'] = md5($f);
fputcsv($handle, $j);
$rows = $rows + 1;
}
fclose($handle);
echo "Created file [$file_name]: $rows rows...\n";
}
/**
* @param $size
* @param string $unit
* @return string
*/
public static function humanFileSize($size, $unit = '')
{
if ((!$unit && $size >= 1 << 30) || $unit == 'GB') {
return number_format($size / (1 << 30), 2) . ' GB';
}
if ((!$unit && $size >= 1 << 20) || $unit == 'MB') {
return number_format($size / (1 << 20), 2) . ' MB';
}
if ((!$unit && $size >= 1 << 10) || $unit == 'KB') {
return number_format($size / (1 << 10), 2) . ' KB';
}
return number_format($size) . ' bytes';
}
/**
* @param $file_name
* @param int $size
*/
public static function makeSomeDataFile($file_name, $size = 10)
{
@unlink($file_name);
$handle = fopen($file_name, 'w');
$z = 0;
$rows = 0;
$j = [];
for ($ules = 0; $ules < $size; $ules++) {
for ($dates = 0; $dates < 5; $dates++) {
for ($site_id = 12; $site_id < 49; $site_id++) {
for ($hours = 0; $hours < 24; $hours++) {
$z++;
$dt = strtotime('-' . $dates . ' day');
$dt = strtotime('-' . $hours . ' hour', $dt);
$j = [];
$j['event_time'] = date('Y-m-d H:00:00', $dt);
$j['url_hash'] = 'XXXX' . $site_id . '_' . $ules;
$j['site_id'] = $site_id;
$j['views'] = 1;
foreach (['00', 55] as $key) {
$z++;
$j['v_' . $key] = ($z % 2 ? 1 : 0);
}
fputcsv($handle, $j);
$rows++;
}
}
}
}
fclose($handle);
echo "Created file [$file_name]: $rows rows...\n";
}
/**
* @param $file_name
* @param int $size
* @return bool
*/
public static function makeSomeDataFileBigOldDates($file_name, $size = 10)
{
if (is_file($file_name)) {
echo "Exist file [$file_name]: ± rows... size = " . self::humanFileSize(filesize($file_name)) . " \n";
return false;
}
@unlink($file_name);
$handle = fopen($file_name, 'w');
$rows = 0;
for ($day_ago = 0; $day_ago < 360; $day_ago++) {
$date = strtotime('-' . $day_ago . ' day');
for ($hash_id = 1; $hash_id < (1 + $size); $hash_id++)
for ($site_id = 100; $site_id < 199; $site_id++) {
$j['event_time'] = date('Y-m-d H:00:00', $date);
$j['site_id'] = $site_id;
$j['hash_id'] = $hash_id;
$j['views'] = 1;
fputcsv($handle, $j);
$rows++;
}
}
fclose($handle);
echo "Created file [$file_name]: $rows rows... size = " . self::humanFileSize(filesize($file_name)) . " \n";
}
/**
* @param $file_name
* @param int $size
* @return bool
*/
public static function makeSomeDataFileBig($file_name, $size = 10, $shift = 0)
{
if (is_file($file_name)) {
echo "Exist file [$file_name]: ± rows... size = " . self::humanFileSize(filesize($file_name)) . " \n";
return false;
}
@unlink($file_name);
$handle = fopen($file_name, 'w');
$z = 0;
$rows = 0;
$j = [];
for ($ules = 0; $ules < $size; $ules++) {
for ($dates = 0; $dates < 5; $dates++) {
for ($site_id = 12; $site_id < 49; $site_id++) {
for ($hours = 0; $hours < 24; $hours++) {
$z++;
$dt = strtotime('-' . ($dates + $shift) . ' day');
$dt = strtotime('-' . $hours . ' hour', $dt);
$j = [];
$j['event_time'] = date('Y-m-d H:00:00', $dt);
$j['url_hash'] = sha1('XXXX' . $site_id . '_' . $ules) . sha1(microtime() . $site_id . ' ' . mt_rand()) . sha1('XXXX' . $site_id . '_' . $ules);
$j['site_id'] = $site_id;
$j['views'] = 1;
foreach (['00', 55] as $key) {
$z++;
$j['v_' . $key] = ($z % 2 ? 1 : 0);
}
fputcsv($handle, $j);
$rows++;
}
}
}
}
fclose($handle);
echo "Created file [$file_name]: $rows rows... size = " . self::humanFileSize(filesize($file_name)) . " \n";
}
}

View File

@ -0,0 +1,7 @@
<?php
return [
'host' => 'tabix.dev7', // your host name
'port' => '8123',
'username' => 'default',
'password' => ''
];

View File

@ -0,0 +1,33 @@
<?php
include_once __DIR__ . '/../../include.php';
include_once __DIR__ . '/../Helper.php';
\ClickHouseDB\Example\Helper::init();
$config = include_once __DIR__ . '/00_config_connect.php';
// ----------------------------------------------------------------------
$cl = new ClickHouseDB\Cluster($config);
$cl->setScanTimeOut(2.5); // 2500 ms
if (!$cl->isReplicasIsOk())
{
throw new Exception('Replica state is bad , error='.$cl->getError());
}
echo "Ips:\n";
print_r($cl->getNodes());
echo "getClusterList:\n";
print_r($cl->getClusterList());
//
foreach (['pulse','repikator','sharovara','repikator3x','sharovara3x'] as $name)
{
echo "-------------------- $name ---------------------------\n";
print_r($cl->getClusterNodes($name));
echo "> Count Shard = ".$cl->getClusterCountShard($name)."\n";
echo "> Count Replica = ".$cl->getClusterCountReplica($name)."\n";
}
// ----------------------------------------------------------------------
echo "\n----\nEND\n";
// ----------------------------------------------------------------------

View File

@ -0,0 +1,26 @@
<?php
include_once __DIR__ . '/../../include.php';
include_once __DIR__ . '/../Helper.php';
\ClickHouseDB\Example\Helper::init();
// load production config
$config = include_once __DIR__ . '/00_config_connect.php';
$cl = new ClickHouseDB\Cluster($config);
$cl->setScanTimeOut(2.5); // 2500 ms
if (!$cl->isReplicasIsOk())
{
throw new Exception('Replica state is bad , error='.$cl->getError());
}
//
$cluster_name='sharovara';
//
echo "> $cluster_name , count shard = ".$cl->getClusterCountShard($cluster_name)." ; count replica = ".$cl->getClusterCountReplica($cluster_name)."\n";
echo "\n----\nEND\n";
// ----------------------------------------------------------------------

View File

@ -0,0 +1,30 @@
<?php
include_once __DIR__ . '/../../include.php';
include_once __DIR__ . '/../Helper.php';
\ClickHouseDB\Example\Helper::init();
$config = include_once __DIR__ . '/00_config_connect.php';
$cl = new ClickHouseDB\Cluster($config);
if (!$cl->isReplicasIsOk())
{
throw new Exception('Replica state is bad , error='.$cl->getError());
}
$cluster_name='sharovara';
echo "> $cluster_name , count shard = ".$cl->getClusterCountShard($cluster_name)." ; count replica = ".$cl->getClusterCountReplica($cluster_name)."\n";
// ------------------------------------------------------------------------------------------------------------------------------------------------------------------------
$nodes=$cl->getNodesByTable('shara.adpreview_body_views_sharded');
foreach ($nodes as $node)
{
echo "$node > \n";
print_r($cl->client($node)->tableSize('adpreview_body_views_sharded'));
print_r($cl->client($node)->showCreateTable('shara.adpreview_body_views'));
}
// ------------------------------------------------------------------------------------------------------------------------------------------------------------------------

View File

@ -0,0 +1,29 @@
<?php
include_once __DIR__ . '/../../include.php';
include_once __DIR__ . '/../Helper.php';
\ClickHouseDB\Example\Helper::init();
// load production config
$config = include_once __DIR__ . '/00_config_connect.php';
$cl = new ClickHouseDB\Cluster($config);
$cl->setScanTimeOut(2.5); // 2500 ms
$cl->setSoftCheck(true);
if (!$cl->isReplicasIsOk())
{
throw new Exception('Replica state is bad , error='.$cl->getError());
}
//
$cluster_name='sharovara';
//
echo "> $cluster_name , count shard = ".$cl->getClusterCountShard($cluster_name)." ; count replica = ".$cl->getClusterCountReplica($cluster_name)."\n";
// Select an IP containing the string ".248", e.g. 123.123.123.248; the delimiter is ";" - if none is found, the first node is taken
$cli=$cl->clientLike($cluster_name,'.298;.964');
$cli->ping();
echo "\n----\nEND\n";
// ----------------------------------------------------------------------

View File

@ -0,0 +1,43 @@
<?php
include_once __DIR__ . '/../../include.php';
include_once __DIR__ . '/../Helper.php';
\ClickHouseDB\Example\Helper::init();
// load production config
$config = include_once __DIR__ . '/00_config_connect.php';
$cl = new ClickHouseDB\Cluster($config);
$cl->setScanTimeOut(2.5); // 2500 ms
$cl->setSoftCheck(true);
if (!$cl->isReplicasIsOk())
{
throw new Exception('Replica state is bad , error='.$cl->getError());
}
$tables=$cl->getTables();
foreach ($tables as $dbtable=>$tmp)
{
echo ">>> $dbtable :";
$size=$cl->getSizeTable($dbtable);
echo "\t".\ClickHouseDB\Example\Helper::humanFileSize($size)."\n";
}
$table_for_truncate='target.events_sharded';
$result=$cl->truncateTable($table_for_truncate);
echo "Result:truncate table\n";
print_r($result);
echo "\n----\nEND\n";
// ----------------------------------------------------------------------

View File

@ -0,0 +1,47 @@
<?php
include_once __DIR__ . '/../../include.php';
include_once __DIR__ . '/../Helper.php';
\ClickHouseDB\Example\Helper::init();
// load production config
$config = include_once __DIR__ . '/00_config_connect.php';
$cl = new ClickHouseDB\Cluster(['host'=>'172.18.0.8','username'=>'default','password'=>'','port'=>8123]);
$cl->setScanTimeOut(2.5); // 2500 ms
$cl->setSoftCheck(true);
if (!$cl->isReplicasIsOk())
{
throw new Exception('Replica state is bad , error='.$cl->getError());
}
print_r($cl->getClusterList());
print_r($cl->getNodes());
print_r($cl->getClusterNodes('cluster'));
$cl->activeClient()->setTimeout(0.01);
for ($z=0;$z<50;$z++)
{
try{
$x=$cl->activeClient()->write("DROP TABLE IF EXISTS default.asdasdasd ON CLUSTER cluster2");
}catch (Exception $exception)
{
}
}
$cl->activeClient()->setTimeout(22);
$x=$cl->activeClient()->write("DROP TABLE IF EXISTS default.asdasdasd ON CLUSTER cluster2");
$x->dump();
echo "\n----\nEND\n";
// ----------------------------------------------------------------------

View File

@ -0,0 +1,58 @@
<?php
include_once __DIR__ . '/../../include.php';
include_once __DIR__ . '/../Helper.php';
\ClickHouseDB\Example\Helper::init();
// load production config
$config = include_once __DIR__ . '/00_config_connect.php';
$db = new ClickHouseDB\Client($config);
$db->settings()->set('replication_alter_partitions_sync',2);
$db->settings()->set('experimental_allow_extended_storage_definition_syntax',1);
for ( $looop=1;$looop<100;$looop++)
{
$db->write("DROP TABLE IF EXISTS testoperation_log");
$db->write("
CREATE TABLE IF NOT EXISTS `testoperation_log` (
`event_date` Date default toDate(time),
`event` String DEFAULT '',
`time` DateTime default now()
) ENGINE=MergeTree ORDER BY time PARTITION BY event_date
");
echo "INSERT DATA....\n";
for ($z=0;$z<1000;$z++)
{
$dataInsert=['time'=>strtotime('-'.mt_rand(0,4000).' day'),'event'=>strval($z)];
try {
$db->insertAssocBulk('testoperation_log',$dataInsert);
echo "$z\r";
}
catch (Exception $exception)
{
die("Error:".$exception->getMessage());
}
}
echo "INSER OK\n DROP PARTITION...\n";
$partitons=($db->partitions('testoperation_log'));
foreach ($partitons as $part)
{
echo "$looop\t\t".$part['partition']."\t".$part['name']."\t".$part['active']."\r";
$db->dropPartition('default.testoperation_log',$part['partition']);
}
echo "SELECT count() ...".str_repeat(" ",300)."\n";
print_r($db->select('SELECT count() FROM default.testoperation_log')->rows());
}
echo "\n----\nEND\n";
// ----------------------------------------------------------------------

View File

@ -0,0 +1,32 @@
<?php
include_once __DIR__ . '/../include.php';
$config = include_once __DIR__ . '/00_config_connect.php';
$db = new ClickHouseDB\Client($config);
//$db->verbose();
$db->settings()->readonly(false);
$result = $db->select(
'SELECT 12 as {key} WHERE {key} = :value',
['key' => 'ping', 'value' => 12]
);
if ($result->fetchOne('ping') != 12) {
echo "Error : ? \n";
}
print_r($result->fetchOne());
// ---------------------------- ASYNC SELECT ----------------------------
$state1 = $db->selectAsync('SELECT 1 as {key} WHERE {key} = :value', ['key' => 'ping', 'value' => 1]);
$state2 = $db->selectAsync('SELECT 2 as ping');
$db->executeAsync();
print_r($state1->fetchOne());
print_r($state1->rows());
print_r($state2->fetchOne('ping'));
//----------------------------------------//----------------------------------------

View File

@ -0,0 +1,146 @@
<?php
include_once __DIR__ . '/../include.php';
$config = include_once __DIR__ . '/00_config_connect.php';
$db = new ClickHouseDB\Client($config);
// ---------------------------- Write ----------------------------
echo "\n-----\ntry write:create_table\n";
$db->database('default');
//------------------------------------------------------------------------------
echo 'Tables EXISTS: ' . json_encode($db->showTables()) . PHP_EOL;
$db->write('DROP TABLE IF EXISTS summing_url_views');
echo 'Tables EXISTS: ' . json_encode($db->showTables()) . PHP_EOL;
$db->write('
CREATE TABLE IF NOT EXISTS summing_url_views (
event_date Date DEFAULT toDate(event_time),
event_time DateTime,
url_hash String,
site_id Int32,
views Int32,
v_00 Int32,
v_55 Int32
)
ENGINE = SummingMergeTree(event_date, (site_id, url_hash, event_time, event_date), 8192)
'
);
echo 'Table EXISTS: ' . json_encode($db->showTables()) . PHP_EOL;
/*
Table EXISTS: [{"name": "summing_url_views"}]
*/
//------------------------------------------------------------------------------
echo "Insert\n";
$stat = $db->insert('summing_url_views',
[
[time(), 'HASH1', 2345, 22, 20, 2],
[time(), 'HASH2', 2345, 12, 9, 3],
[time(), 'HASH3', 5345, 33, 33, 0],
[time(), 'HASH3', 5345, 55, 0, 55],
],
['event_time', 'url_hash', 'site_id', 'views', 'v_00', 'v_55']
);
echo "Insert Done\n";
//------------------------------------------------------------------------------
echo "Try select \n";
$st = $db->select('SELECT * FROM summing_url_views LIMIT 2');
echo "Count select rows:".$st->count()."\n";
echo "Count all rows:".$st->countAll()."\n";
echo "First row:\n";
print_r($st->fetchOne());
echo "extremes_min:\n";
print_r($st->extremesMin());
echo "totals:\n";
print_r($st->totals());
$st=$db->select('SELECT event_date,url_hash,sum(views),avg(views) FROM summing_url_views WHERE site_id<3333 GROUP BY event_date,url_hash WITH TOTALS');
echo "Count select rows:".$st->count()."\n";
/*
2
*/
echo "Count all rows:".$st->countAll()."\n";
/*
false
*/
echo "First row:\n";
print_r($st->fetchOne());
/*
(
[event_date] => 2016-07-18
[url_hash] => HASH1
[sum(views)] => 22
[avg(views)] => 22
)
*/
echo "totals:\n";
print_r($st->totals());
/*
(
[event_date] => 0000-00-00
[url_hash] =>
[sum(views)] => 34
[avg(views)] => 17
)
*/
echo "Tree Path [event_date.url_hash]:\n";
print_r($st->rowsAsTree('event_date.url_hash'));
/*
(
[2016-07-18] => Array
(
[HASH2] => Array
(
[event_date] => 2016-07-18
[url_hash] => HASH2
[sum(views)] => 12
[avg(views)] => 12
)
[HASH1] => Array
(
[event_date] => 2016-07-18
[url_hash] => HASH1
[sum(views)] => 22
[avg(views)] => 22
)
)
)
*/
$db->write("DROP TABLE IF EXISTS summing_url_views");
echo "Tables EXISTS:".json_encode($db->showTables())."\n";
/*
Tables EXISTS:[]
*/

View File

@ -0,0 +1,107 @@
<?php
include_once __DIR__ . '/../include.php';
include_once __DIR__ . '/Helper.php';
\ClickHouseDB\Example\Helper::init();
$config = include_once __DIR__ . '/00_config_connect.php';
$db = new ClickHouseDB\Client($config);
$db->enableHttpCompression(true);
$db->write("DROP TABLE IF EXISTS summing_url_views");
$db->write('
CREATE TABLE IF NOT EXISTS summing_url_views (
event_date Date DEFAULT toDate(event_time),
event_time DateTime,
url_hash String,
site_id Int32,
views Int32,
v_00 Int32,
v_55 Int32
)
ENGINE = SummingMergeTree(event_date, (site_id, url_hash, event_time, event_date), 8192)
');
echo "Table EXISTS: " . json_encode($db->showTables()) . "\n";
// -------------------------------- CREATE csv file ----------------------------------------------------------------
// ----------------------------------------------------------------------------------------------------
$file_data_names = [
'/tmp/clickHouseDB_test.1.data',
'/tmp/clickHouseDB_test.2.data',
'/tmp/clickHouseDB_test.3.data',
'/tmp/clickHouseDB_test.4.data',
'/tmp/clickHouseDB_test.5.data',
];
foreach ($file_data_names as $file_name) {
\ClickHouseDB\Example\Helper::makeSomeDataFile($file_name, 5);
}
// ----------------------------------------------------------------------------------------------------
echo "insert ONE file:\n";
$time_start = microtime(true);
$stat = $db->insertBatchFiles('summing_url_views', ['/tmp/clickHouseDB_test.1.data'], [
'event_time', 'url_hash', 'site_id', 'views', 'v_00', 'v_55'
]);
echo "use time:" . round(microtime(true) - $time_start, 2) . "\n";
print_r($db->select('select sum(views) from summing_url_views')->rows());
echo "insert ALL file async:\n";
$time_start = microtime(true);
$result_insert = $db->insertBatchFiles('summing_url_views', $file_data_names, [
'event_time', 'url_hash', 'site_id', 'views', 'v_00', 'v_55'
]);
echo "use time:" . round(microtime(true) - $time_start, 2) . "\n";
print_r($db->select('select sum(views) from summing_url_views')->rows());
// ------------------------------------------------------------------------------------------------
foreach ($file_data_names as $fileName) {
echo $fileName . " : " . $result_insert[$fileName]->totalTimeRequest() . "\n";
}
// ------------------------------------------------------------------------------------------------
/*
Table EXISTSs:[{"name":"summing_url_views"}]
Created file [/tmp/clickHouseDB_test.1.data]: 22200 rows...
Created file [/tmp/clickHouseDB_test.2.data]: 22200 rows...
Created file [/tmp/clickHouseDB_test.3.data]: 22200 rows...
Created file [/tmp/clickHouseDB_test.4.data]: 22200 rows...
Created file [/tmp/clickHouseDB_test.5.data]: 22200 rows...
insert ONE file:
use time:0.7
Array
(
[0] => Array
(
[sum(views)] => 22200
)
)
insert ALL file async:
use time:0.74
Array
(
[0] => Array
(
[sum(views)] => 133200
)
)
*/

View File

@ -0,0 +1,58 @@
<?php
include_once __DIR__ . '/../include.php';
$config = include_once __DIR__ . '/00_config_connect.php';
$db = new ClickHouseDB\Client($config);
$input_params = [
'select_date' => ['2000-10-10', '2000-10-11', '2000-10-12'],
'limit' => 5,
'from_table' => 'table'
];
$db->enableQueryConditions();
$select = '
SELECT * FROM {from_table}
WHERE
{if select_date}
event_date IN (:select_date)
{else}
event_date=today()
{/if}
{if limit}
LIMIT {limit}
{/if}
';
$statement = $db->selectAsync($select, $input_params);
echo $statement->sql();
echo "\n";
/*
SELECT * FROM table
WHERE
event_date IN ('2000-10-10','2000-10-11','2000-10-12')
LIMIT 5
FORMAT JSON
*/
$input_params['select_date'] = false;
$statement = $db->selectAsync($select, $input_params);
echo $statement->sql();
echo "\n";
/*
SELECT * FROM table
WHERE
event_date=today()
LIMIT 5
FORMAT JSON
*/

View File

@ -0,0 +1,107 @@
<?php
include_once __DIR__ . '/../include.php';
include_once __DIR__ . '/Helper.php';
\ClickHouseDB\Example\Helper::init();
$config = include_once __DIR__ . '/00_config_connect.php';
$db = new ClickHouseDB\Client($config);
for ($f=0;$f<1000;$f++)
{
$list[$f]=$db->selectAsync('SELECT {num} as num',['num'=>$f]);
}
$db->executeAsync();
for ($f=0;$f<1000;$f++)
{
$c=$list[$f];
echo $f."\t";
$ret='-';
try{
$ret=$c->fetchOne('num');
}catch (Exception $e)
{
}
echo "$ret\n";
}
// -------------------------------- ------- ----------------------------------------------------------------
$db->write("DROP TABLE IF EXISTS summing_url_views");
$db->write('
CREATE TABLE IF NOT EXISTS summing_url_views (
event_date Date DEFAULT toDate(event_time),
event_time DateTime,
url_hash String,
site_id Int32,
views Int32,
v_00 Int32,
v_55 Int32
)
ENGINE = SummingMergeTree(event_date, (site_id, url_hash, event_time, event_date), 8192)
');
echo "Table EXISTSs:" . json_encode($db->showTables()) . "\n";
// -------------------------------- CREATE csv file ----------------------------------------------------------------
$file_data_names = [
'/tmp/clickHouseDB_test.1.data',
'/tmp/clickHouseDB_test.2.data',
];
foreach ($file_data_names as $file_name) {
\ClickHouseDB\Example\Helper::makeSomeDataFile($file_name, 1);
}
// ----------------------------------------------------------------------------------------------------
echo "insert ONE file:\n";
$time_start = microtime(true);
$version_test = 3;
if ($version_test == 1) {
$statselect1 = $db->selectAsync('SELECT * FROM summing_url_views LIMIT 1');
$statselect2 = $db->selectAsync('SELECT * FROM summing_url_views LIMIT 1');
$stat = $db->insertBatchFiles('summing_url_views', ['/tmp/clickHouseDB_test.1.data'], [
'event_time', 'url_hash', 'site_id', 'views', 'v_00', 'v_55'
]);
// 'Exception' with message 'Queue must be empty, before insertBatch,need executeAsync'
}
//
if ($version_test == 2) {
$statselect1 = $db->selectAsync('SELECT * FROM summing_url_views LIMIT 1');
print_r($statselect1->rows());
// 'Exception' with message 'Not have response'
}
// good
if ($version_test == 3) {
$statselect2 = $db->selectAsync('SELECT * FROM summing_url_views LIMIT 1');
$db->executeAsync();
$stat = $db->insertBatchFiles('summing_url_views', ['/tmp/clickHouseDB_test.1.data'], [
'event_time', 'url_hash', 'site_id', 'views', 'v_00', 'v_55'
]);
$statselect1 = $db->selectAsync('SELECT * FROM summing_url_views LIMIT 1');
$db->executeAsync();
print_r($statselect1->rows());
}

View File

@ -0,0 +1,63 @@
<?php
include_once __DIR__ . '/../include.php';
include_once __DIR__ . '/Helper.php';
\ClickHouseDB\Example\Helper::init();
$config = include_once __DIR__ . '/00_config_connect.php';
$start_time = microtime(true);
$db = new ClickHouseDB\Client($config);
// some file names to data
$file_name_data1 = "/tmp/temp_csv.txt";
$file_name_data2 = "/tmp/site_keys.data";
// create CSV file
\ClickHouseDB\Example\Helper::makeListSitesKeysDataFile($file_name_data1, 1000, 2000); // see lib_example.php
\ClickHouseDB\Example\Helper::makeListSitesKeysDataFile($file_name_data2, 5000, 6000); // see lib_example.php
// create WhereInFile
$whereIn = new \ClickHouseDB\Query\WhereInFile();
// attachFile( {full_file_path} , {data_table_name} , [ { structure } ]
$whereIn->attachFile($file_name_data1, 'namex', ['site_id' => 'Int32', 'site_hash' => 'String'], \ClickHouseDB\Query\WhereInFile::FORMAT_CSV);
$whereIn->attachFile($file_name_data2, 'site_keys', ['site_id' => 'Int32', 'site_hash' => 'String'], \ClickHouseDB\Query\WhereInFile::FORMAT_CSV);
$result = $db->select('select 1', [], $whereIn);
print_r($result->rows());
// ----------------------------------------------- ASYNC ------------------------------------------------------------------------------------------
echo "\n----------------------- ASYNC ------------ \n";
$bindings['limit'] = 3;
$statements = [];
$whereIn = new \ClickHouseDB\Query\WhereInFile();
$whereIn->attachFile($file_name_data1, 'namex', ['site_id' => 'Int32', 'site_hash' => 'String'], \ClickHouseDB\Query\WhereInFile::FORMAT_CSV);
$statements[0] = $db->selectAsync('select 3', $bindings, $whereIn);
// change data file - for statement two
$whereIn = new \ClickHouseDB\Query\WhereInFile();
$whereIn->attachFile($file_name_data2, 'namex', ['site_id' => 'Int32', 'site_hash' => 'String'], \ClickHouseDB\Query\WhereInFile::FORMAT_CSV);
$statements[1] = $db->selectAsync('select 2', $bindings, $whereIn);
$db->executeAsync();
foreach ($statements as $statement) {
print_r($statement->rows());
}
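// Hedged sketch (an illustration, not part of the original example): the attached file is exposed
// to the query as an external table named by the second attachFile() argument, so it can be
// referenced directly in the SQL, e.g.:
//   $whereIn = new \ClickHouseDB\Query\WhereInFile();
//   $whereIn->attachFile($file_name_data2, 'site_keys', ['site_id' => 'Int32', 'site_hash' => 'String'], \ClickHouseDB\Query\WhereInFile::FORMAT_CSV);
//   print_r($db->select('SELECT count() AS c FROM site_keys', [], $whereIn)->rows());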

View File

@ -0,0 +1,135 @@
<?php
include_once __DIR__ . '/../include.php';
include_once __DIR__ . '/Helper.php';
\ClickHouseDB\Example\Helper::init();
$config = include_once __DIR__ . '/00_config_connect.php';
$db = new ClickHouseDB\Client($config);
$db->write("DROP TABLE IF EXISTS summing_url_views");
$db->write('
CREATE TABLE IF NOT EXISTS summing_url_views (
event_date Date DEFAULT toDate(event_time),
event_time DateTime,
url_hash String,
site_id Int32,
views Int32,
v_00 Int32,
v_55 Int32
)
ENGINE = SummingMergeTree(event_date, (site_id, url_hash, event_time, event_date), 8192)
');
echo "Table EXISTS:" . json_encode($db->showTables()) . "\n";
// ------------------------------------------------------------------------------------------------------
echo "----------------------------------- CREATE big csv file -----------------------------------------------------------------\n";
$file_data_names = [
'/tmp/clickHouseDB_test.b.1.data',
'/tmp/clickHouseDB_test.b.2.data',
'/tmp/clickHouseDB_test.b.3.data',
'/tmp/clickHouseDB_test.b.4.data',
'/tmp/clickHouseDB_test.b.5.data',
];
$c = 0;
foreach ($file_data_names as $file_name) {
$c++;
\ClickHouseDB\Example\Helper::makeSomeDataFileBig($file_name, 40 * $c);
}
echo "----------------------------------------------------------------------------------------------------\n";
echo "insert ALL file async NO gzip:\n";
$db->settings()->max_execution_time(200);
$time_start = microtime(true);
$result_insert = $db->insertBatchFiles('summing_url_views', $file_data_names, [
'event_time', 'url_hash', 'site_id', 'views', 'v_00', 'v_55'
]);
echo "use time:" . round(microtime(true) - $time_start, 2) . "\n";
foreach ($result_insert as $state) {
echo "Info : " . json_encode($state->info_upload()) . "\n";
}
print_r($db->select('select sum(views) from summing_url_views')->rows());
echo "--------------------------------------- enableHttpCompression -------------------------------------------------------------\n";
echo "insert ALL file async + GZIP:\n";
$db->enableHttpCompression(true);
$time_start = microtime(true);
$result_insert = $db->insertBatchFiles('summing_url_views', $file_data_names, [
'event_time', 'url_hash', 'site_id', 'views', 'v_00', 'v_55'
]);
echo "use time:" . round(microtime(true) - $time_start, 2) . "\n";
foreach ($result_insert as $fileName => $state) {
echo "$fileName => " . json_encode($state->info_upload()) . "\n";
}
print_r($db->select('select sum(views) from summing_url_views')->rows());
echo "----------------------------------------------------------------------------------------------------\n";
echo ">>> rm -f /tmp/clickHouseDB_test.b.*\n";
/*
Table EXISTSs:[{"name":"summing_url_views"}]
----------------------------------- CREATE big csv file -----------------------------------------------------------------
Created file [/tmp/clickHouseDB_test.b.1.data]: 177600 rows... size = 25.74 MB
Created file [/tmp/clickHouseDB_test.b.2.data]: 355200 rows... size = 51.49 MB
Created file [/tmp/clickHouseDB_test.b.3.data]: 532800 rows... size = 77.23 MB
Created file [/tmp/clickHouseDB_test.b.4.data]: 710400 rows... size = 102.98 MB
Created file [/tmp/clickHouseDB_test.b.5.data]: 888000 rows... size = 128.72 MB
----------------------------------------------------------------------------------------------------
insert ALL file async NO gzip:
use time:100.94
Info : {"size_upload":"25.74 MB","upload_content":"25.74 MB","speed_upload":"10.11 Mbps","time_request":21.358527}
Info : {"size_upload":"51.49 MB","upload_content":"51.49 MB","speed_upload":"10.67 Mbps","time_request":40.490685}
Info : {"size_upload":"77.23 MB","upload_content":"77.23 MB","speed_upload":"10.52 Mbps","time_request":61.610698}
Info : {"size_upload":"102.98 MB","upload_content":"102.98 MB","speed_upload":"10.8 Mbps","time_request":80.016749}
Info : {"size_upload":"128.72 MB","upload_content":"128.72 MB","speed_upload":"10.7 Mbps","time_request":100.931881}
Array
(
[0] => Array
(
[sum(views)] => 2664000
)
)
--------------------------------------- enableHttpCompression -------------------------------------------------------------
insert ALL file async + GZIP:
use time:34.76
/tmp/clickHouseDB_test.b.1.data => {"size_upload":"5.27 MB","upload_content":"-1 bytes","speed_upload":"5.23 Mbps","time_request":8.444056}
/tmp/clickHouseDB_test.b.2.data => {"size_upload":"10.54 MB","upload_content":"-1 bytes","speed_upload":"5.53 Mbps","time_request":15.974618}
/tmp/clickHouseDB_test.b.3.data => {"size_upload":"15.80 MB","upload_content":"-1 bytes","speed_upload":"4.98 Mbps","time_request":26.64583}
/tmp/clickHouseDB_test.b.4.data => {"size_upload":"21.07 MB","upload_content":"-1 bytes","speed_upload":"6.3 Mbps","time_request":28.05784}
/tmp/clickHouseDB_test.b.5.data => {"size_upload":"26.34 MB","upload_content":"-1 bytes","speed_upload":"6.36 Mbps","time_request":34.738461}
Array
(
[0] => Array
(
[sum(views)] => 5328000
)
)
----------------------------------------------------------------------------------------------------
*/

View File

@ -0,0 +1,81 @@
<?php
include_once __DIR__ . '/../include.php';
include_once __DIR__ . '/Helper.php';
\ClickHouseDB\Example\Helper::init();
$config = include_once __DIR__ . '/00_config_connect.php';
$db = new ClickHouseDB\Client($config);
$create = true;
if ($create) {
$db->write("DROP TABLE IF EXISTS summing_partions_views");
$db->write('
CREATE TABLE IF NOT EXISTS summing_partions_views (
event_date Date DEFAULT toDate(event_time),
event_time DateTime,
site_id Int32,
hash_id Int32,
views Int32
)
ENGINE = SummingMergeTree(event_date, (site_id,hash_id, event_time, event_date), 8192)
');
echo "Table EXISTS:" . json_encode($db->showTables()) . "\n";
echo "----------------------------------- CREATE csv file -----------------------------------------------------------------\n";
$file_data_names = [
'/tmp/clickHouseDB_test.part.1.data',
'/tmp/clickHouseDB_test.part.2.data',
'/tmp/clickHouseDB_test.part.3.data',
];
$c = 0;
foreach ($file_data_names as $file_name) {
$c++;
\ClickHouseDB\Example\Helper::makeSomeDataFileBigOldDates($file_name, $c);
}
echo "--------------------------------------- insert -------------------------------------------------------------\n";
echo "insert ALL file async + GZIP:\n";
$db->enableHttpCompression(true);
$time_start = microtime(true);
$result_insert = $db->insertBatchFiles('summing_partions_views', $file_data_names, [
'event_time', 'site_id', 'hash_id', 'views'
]);
echo "use time:" . round(microtime(true) - $time_start, 2) . " sec.\n";
foreach ($result_insert as $fileName => $state) {
echo "$fileName => " . json_encode($state->info_upload()) . "\n";
}
}
echo "--------------------------------------- select -------------------------------------------------------------\n";
print_r($db->select('select min(event_date),max(event_date) from summing_partions_views ')->rows());
echo "--------------------------------------- list partitions -------------------------------------------------------------\n";
echo "databaseSize : " . json_encode($db->databaseSize()) . "\n";
echo "tableSize : " . json_encode($db->tableSize('summing_partions_views')) . "\n";
echo "partitions : " . json_encode($db->partitions('summing_partions_views', 2)) . "\n";
echo "--------------------------------------- drop partitions -------------------------------------------------------------\n";
echo "dropOldPartitions -30 days : " . json_encode($db->dropOldPartitions('summing_partions_views', 30)) . "\n";
echo "--------------------------------------- list partitions -------------------------------------------------------------\n";
echo "databaseSize : " . json_encode($db->databaseSize()) . "\n";
echo "tableSize : " . json_encode($db->tableSize('summing_partions_views')) . "\n";
echo "partitions : " . json_encode($db->partitions('summing_partions_views', 2)) . "\n";

View File

@ -0,0 +1,57 @@
<?php
include_once __DIR__ . '/../include.php';
$config = include_once __DIR__ . '/00_config_connect.php';
$db = new ClickHouseDB\Client($config, ['max_execution_time' => 100]);
if ($db->settings()->getSetting('max_execution_time') !== 100) {
throw new Exception("Bad work settings");
}
// set method
$config = [
'host' => 'x',
'port' => '8123',
'username' => 'x',
'password' => 'x'
];
$db = new ClickHouseDB\Client($config);
$db->settings()->set('max_execution_time', 100);
if ($db->settings()->getSetting('max_execution_time') !== 100) {
throw new Exception("Bad work settings");
}
// apply array method
$config = [
'host' => 'x',
'port' => '8123',
'username' => 'x',
'password' => 'x'
];
$db = new ClickHouseDB\Client($config);
$db->settings()->apply([
'max_execution_time' => 100,
'max_block_size' => 12345
]);
if ($db->settings()->getSetting('max_execution_time') !== 100) {
throw new Exception("Bad work settings");
}
if ($db->settings()->getSetting('max_block_size') !== 12345) {
throw new Exception("Bad work settings");
}
echo "getSetting - OK\n";

View File

@ -0,0 +1,58 @@
<?php
include_once __DIR__ . '/../include.php';
$config = include_once __DIR__ . '/00_config_connect.php';
$db = new ClickHouseDB\Client($config);
try {
$db->ping();
}
catch (ClickHouseDB\Exception\QueryException $E) {
echo "ERROR:" . $E->getMessage() . "\nOK\n";
}
// ------------------
$db = new ClickHouseDB\Client([
'host' => 'NO_DB_HOST.COM',
'port' => '8123',
'username' => 'x',
'password' => 'x'
]);
$db->setConnectTimeOut(1);
try {
$db->ping();
}
catch (ClickHouseDB\Exception\QueryException $E) {
echo "ERROR:" . $E->getMessage() . "\nOK\n";
}
// ------------------
$db = new ClickHouseDB\Client($config);
try {
$db->ping();
echo "PING : OK!\n";
}
catch (ClickHouseDB\Exception\QueryException $E) {
echo "ERROR:" . $E->getMessage() . "\nOK\n";
}
try {
$db->select("SELECT xxx as PPPP FROM ZZZZZ ")->rows();
}
catch (ClickHouseDB\Exception\DatabaseException $E) {
echo "ERROR : DatabaseException : " . $E->getMessage() . "\n"; // Table default.ZZZZZ doesn't exist.
}
// ----------------------------

View File

@ -0,0 +1,152 @@
<?php
include_once __DIR__ . '/../include.php';
$config = include_once __DIR__ . '/00_config_connect.php';
$db = new ClickHouseDB\Client($config);
$db->write("DROP TABLE IF EXISTS arrays_test");
$res = $db->write('
CREATE TABLE IF NOT EXISTS arrays_test (
s_key String,
s_arr Array(UInt8)
) ENGINE = Memory
');
//------------------------------------------------------------------------------
echo "Insert\n";
$stat = $db->insert('arrays_test', [
['HASH1', [11, 22, 33]],
['HASH1', [11, 22, 55]],
], ['s_key', 's_arr']);
echo "Insert Done\n";
print_r($db->select('SELECT s_key, s_arr FROM arrays_test ARRAY JOIN s_arr')->rows());
$db->write("DROP TABLE IF EXISTS arrays_test_string");
$res = $db->write('
CREATE TABLE IF NOT EXISTS arrays_test_string (
s_key String,
s_arr Array(String)
) ENGINE = Memory
');
echo "Insert\n";
$stat = $db->insert('arrays_test_string', [
['HASH1', ["a", "dddd", "xxx"]],
['HASH1', ["b'\tx"]],
], ['s_key', 's_arr']);
echo "Insert Done\n";
print_r($db->select('SELECT s_key, s_arr FROM arrays_test_string ARRAY JOIN s_arr')->rows());
echo "\ntestRFCCSVWrite>>>>\n";
$fileName='/tmp/testRFCCSVWrite.CSV';
date_default_timezone_set('Europe/Moscow');
$db->write("DROP TABLE IF EXISTS testRFCCSVWrite");
$db->write('CREATE TABLE testRFCCSVWrite (
event_date Date DEFAULT toDate(event_time),
event_time DateTime,
strs String,
flos Float32,
ints Int32,
arr1 Array(UInt8),
arrs Array(String)
) ENGINE = TinyLog()');
@unlink($fileName);
$data=[
['event_time'=>date('Y-m-d H:i:s'),'strs'=>'SOME STRING','flos'=>1.1,'ints'=>1,'arr1'=>[1,2,3],'arrs'=>["A","B"]],
['event_time'=>date('Y-m-d H:i:s'),'strs'=>'SOME STRING','flos'=>2.3,'ints'=>2,'arr1'=>[1,2,3],'arrs'=>["A","B"]],
['event_time'=>date('Y-m-d H:i:s'),'strs'=>'SOME\'STRING','flos'=>0,'ints'=>0,'arr1'=>[1,2,3],'arrs'=>["A","B"]],
['event_time'=>date('Y-m-d H:i:s'),'strs'=>'SOME\'"TRING','flos'=>0,'ints'=>0,'arr1'=>[1,2,3],'arrs'=>["A","B"]],
['event_time'=>date('Y-m-d H:i:s'),'strs'=>"SOMET\nRI\n\"N\"G\\XX_ABCDEFG",'flos'=>0,'ints'=>0,'arr1'=>[1,2,3],'arrs'=>["A","B\nD\nC"]],
['event_time'=>date('Y-m-d H:i:s'),'strs'=>"ID_ARRAY",'flos'=>0,'ints'=>0,'arr1'=>[1,2,3],'arrs'=>["A","B\nD\nC"]]
];
//// 1.1 + 2.3 = 3.3999999761581
//
foreach ($data as $row)
{
file_put_contents($fileName,\ClickHouseDB\Quote\FormatLine::CSV($row)."\n",FILE_APPEND);
}
//
echo "FILE:\n\n";
echo file_get_contents($fileName)."\n\n----\n";
//
$db->insertBatchFiles('testRFCCSVWrite', [$fileName], [
'event_time',
'strs',
'flos',
'ints',
'arr1',
'arrs',
]);
$st=$db->select('SELECT * FROM testRFCCSVWrite');
print_r($st->rows());
//
echo "\n<<<<< TAB >>>>\n";
$fileName='/tmp/testRFCCSVWrite.TAB';@unlink($fileName);
$db->write("DROP TABLE IF EXISTS testTABWrite");
$db->write('CREATE TABLE testTABWrite (
event_date Date DEFAULT toDate(event_time),
event_time DateTime,
strs String,
flos Float32,
ints Int32,
arr1 Array(UInt8),
arrs Array(String)
) ENGINE = Log()');
$data=[
['event_time'=>date('Y-m-d H:i:s'),'strs'=>"STING\t\tSD!\"\nFCD\tSAD\t\nDSF",'flos'=>-2.3,'ints'=>123,'arr1'=>[1,2,3],'arrs'=>["A","B"]],
['event_time'=>date('Y-m-d H:i:s'),'strs'=>'SOME\'STRING','flos'=>0,'ints'=>12123,'arr1'=>[1,2,3],'arrs'=>["A","B"]],
['event_time'=>date('Y-m-d H:i:s'),'strs'=>'SOME\'"TR\tING','flos'=>0,'ints'=>0,'arr1'=>[1,2,3],'arrs'=>["A","B"]],
['event_time'=>date('Y-m-d H:i:s'),'strs'=>"SOMET\nRI\n\"N\"G\\XX_ABCDEFG",'flos'=>0,'ints'=>1,'arr1'=>[1,2,3],'arrs'=>["A","B\nD\ns\tC"]],
['event_time'=>date('Y-m-d H:i:s'),'strs'=>"ID_ARRAY",'flos'=>-2.3,'ints'=>-12123,'arr1'=>[1,2,3],'arrs'=>["A","B\nD\nC\n\t\n\tTABARRAYS"]]
];
foreach ($data as $row)
{
file_put_contents($fileName,\ClickHouseDB\Quote\FormatLine::TSV($row)."\n",FILE_APPEND);
}
//
echo "FILE:\n\n";
echo file_get_contents($fileName)."\n\n----\n";
//
$db->insertBatchTSVFiles('testTABWrite', [$fileName], [
'event_time',
'strs',
'flos',
'ints',
'arr1',
'arrs',
]);
$st=$db->select('SELECT * FROM testTABWrite');
print_r($st->rows());
$st=$db->select('SELECT round(sum(flos),5),sum(ints) FROM testTABWrite');
print_r($st->rows());
//

View File

@ -0,0 +1,43 @@
<?php
include_once __DIR__ . '/../include.php';
$config = include_once __DIR__ . '/00_config_connect.php';
$db = new ClickHouseDB\Client($config);
//$db->verbose();
$db->settings()->readonly(false);
$result = $db->select(
'SELECT 12 as {key} WHERE {key} = :value',
['key' => 'ping', 'value' => 12]
);
if ($result->fetchOne('ping') != 12) {
echo "Error : ? \n";
}
print_r($result->fetchOne());
echo 'elapsed :'.$result->statistics('elapsed')."\n";
echo 'rows_read :'.$result->statistics('rows_read')."\n";
echo 'bytes_read:'.$result->statistics('bytes_read')."\n";
//
$result = $db->select("SELECT 12 as ping");
print_r($result->statistics());
/*
"statistics":
{
"elapsed": 0.000029702,
"rows_read": 1,
"bytes_read": 1
}
*/

View File

@ -0,0 +1,80 @@
<?php
include_once __DIR__ . '/../include.php';
include_once __DIR__ . '/Helper.php';
\ClickHouseDB\Example\Helper::init();
$config = include_once __DIR__ . '/00_config_connect.php';
$db = new ClickHouseDB\Client($config);
$db->enableHttpCompression(true);
$db->write("DROP TABLE IF EXISTS summing_url_views");
$db->write('
CREATE TABLE IF NOT EXISTS summing_url_views (
event_date Date DEFAULT toDate(event_time),
event_time DateTime,
url_hash String,
site_id Int32,
views Int32,
v_00 Int32,
v_55 Int32
)
ENGINE = SummingMergeTree(event_date, (site_id, url_hash, event_time, event_date), 8192)
');
echo "Table EXISTS: " . json_encode($db->showTables()) . "\n";
echo $db->showCreateTable('summing_url_views');
// -------------------------------- CREATE csv file ----------------------------------------------------------------
$file_data_names = [
'/tmp/clickHouseDB_test.1.data',
'/tmp/clickHouseDB_test.2.data',
];
foreach ($file_data_names as $file_name) {
\ClickHouseDB\Example\Helper::makeSomeDataFile($file_name, 2);
}
// ----------------------------------------------------------------------------------------------------
echo "insert ALL file async:\n";
$time_start = microtime(true);
$result_insert = $db->insertBatchFiles('summing_url_views', $file_data_names, [
'event_time', 'url_hash', 'site_id', 'views', 'v_00', 'v_55'
]);
echo "use time:" . round(microtime(true) - $time_start, 2) . "\n";
print_r($db->select('select sum(views) from summing_url_views')->rows());
// ------------------------------------------------------------------------------------------------
$WriteToFile=new ClickHouseDB\Query\WriteToFile('/tmp/_1_select.csv');
$statement=$db->select('select * from summing_url_views',[],null,$WriteToFile);
print_r($statement->info());
//
$db->selectAsync('select * from summing_url_views limit 4',[],null,new ClickHouseDB\Query\WriteToFile('/tmp/_2_select.csv'));
$db->selectAsync('select * from summing_url_views limit 4',[],null,new ClickHouseDB\Query\WriteToFile('/tmp/_3_select.tab',true,'TabSeparatedWithNames'));
$db->selectAsync('select * from summing_url_views limit 4',[],null,new ClickHouseDB\Query\WriteToFile('/tmp/_4_select.tab',true,'TabSeparated'));
$statement=$db->selectAsync('select * from summing_url_views limit 54',[],null,new ClickHouseDB\Query\WriteToFile('/tmp/_5_select.csv',true,ClickHouseDB\Query\WriteToFile::FORMAT_CSV));
$db->executeAsync();
print_r($statement->info());
echo "END SELECT\n";
echo "TRY GZIP\n";
$WriteToFile=new ClickHouseDB\Query\WriteToFile('/tmp/_0_select.csv.gz');
$WriteToFile->setFormat(ClickHouseDB\Query\WriteToFile::FORMAT_TabSeparatedWithNames);
$WriteToFile->setGzip(true);// cat /tmp/_0_select.csv.gz | gzip -dc > /tmp/w.result
$statement=$db->select('select * from summing_url_views',[],null,$WriteToFile);
print_r($statement->info());
echo "OK!\n\n";

View File

@ -0,0 +1,49 @@
<?php
include_once __DIR__ . '/../include.php';
include_once __DIR__ . '/Helper.php';
\ClickHouseDB\Example\Helper::init();
class CustomDegeneration implements \ClickHouseDB\Query\Degeneration
{
private $bindings=[];
public function bindParams(array $bindings)
{
$this->bindings=$bindings;
}
public function process($sql)
{
if (sizeof($this->bindings))
{
foreach ($this->bindings as $key=>$value)
{
$sql=str_ireplace('%'.$key.'%',$value,$sql);
}
}
return str_ireplace('XXXX','SELECT',$sql);
}
}
$config = include_once __DIR__ . '/00_config_connect.php';
$db = new ClickHouseDB\Client($config);
print_r($db->select('SELECT 1 as ping')->fetchOne());
// CustomConditions
$db->addQueryDegeneration(new CustomDegeneration());
// strreplace XXXX=>SELECT
print_r($db->select('XXXX 1 as ping')->fetchOne());
// SELECT 1 as ping
print_r($db->select('XXXX 1 as %ZX%',['ZX'=>'ping'])->fetchOne());

View File

@ -0,0 +1,145 @@
<?php
include_once __DIR__ . '/../include.php';
include_once __DIR__ . '/Helper.php';
\ClickHouseDB\Example\Helper::init();
$config = include_once __DIR__ . '/00_config_connect.php';
$db = new ClickHouseDB\Client($config);
$_flag_create_table=false;
$size=$db->tableSize('summing_url_views_cityHash64_site_id');
echo "Site table summing_url_views_cityHash64_site_id : ".(isset($size['size'])?$size['size']:'false')."\n";
if (!isset($size['size'])) $_flag_create_table=true;
if ($_flag_create_table) {
$db->write("DROP TABLE IF EXISTS summing_url_views_cityHash64_site_id");
$re=$db->write('
CREATE TABLE IF NOT EXISTS summing_url_views_cityHash64_site_id (
event_date Date DEFAULT toDate(event_time),
event_time DateTime,
url_hash String,
site_id Int32,
views Int32,
v_00 Int32,
v_55 Int32
)
ENGINE = SummingMergeTree(event_date, cityHash64(site_id,event_time),(site_id, url_hash, event_time, event_date,cityHash64(site_id,event_time)), 8192)
');
echo "Table EXISTS:" . print_r($db->showTables()) . "\n";
// ------------------------------------------------------------------------------------------------------
echo "----------------------------------- CREATE big csv file -----------------------------------------------------------------\n";
$file_data_names = [
'/tmp/clickHouseDB_test.big.1.data',
'/tmp/clickHouseDB_test.big.2.data',
'/tmp/clickHouseDB_test.big.3.data',
];
$c = 0;
foreach ($file_data_names as $file_name) {
$c++;
$shift_days=( -1* $c*3);
\ClickHouseDB\Example\Helper::makeSomeDataFileBig($file_name, 23 * $c,$shift_days);
}
echo "----------------------------------------------------------------------------------------------------\n";
echo "insert ALL file async + GZIP:\n";
$db->enableHttpCompression(true);
$time_start = microtime(true);
$result_insert = $db->insertBatchFiles('summing_url_views_cityHash64_site_id', $file_data_names, [
'event_time', 'url_hash', 'site_id', 'views', 'v_00', 'v_55'
]);
echo "use time:" . round(microtime(true) - $time_start, 2) . "\n";
foreach ($result_insert as $fileName => $state) {
echo "$fileName => " . json_encode($state->info_upload()) . "\n";
}
}
echo "------------------------------- COMPARE event_date ---------------------------------------------------------------------\n";
$rows=($db->select('select event_date,sum(views) as v from summing_url_views_cityHash64_site_id GROUP BY event_date ORDER BY event_date')->rowsAsTree('event_date'));
$samp=($db->select('select event_date,(sum(views)*10) as v from summing_url_views_cityHash64_site_id SAMPLE 0.1 GROUP BY event_date ORDER BY event_date ')->rowsAsTree('event_date'));
foreach ($rows as $event_date=>$data)
{
echo $event_date."\t".$data['v']."\t".@$samp[$event_date]['v']."\n";
}
$rows=($db->select('select site_id,sum(views) as v from summing_url_views_cityHash64_site_id GROUP BY site_id ORDER BY site_id')->rowsAsTree('site_id'));
$samp=($db->select('select site_id,(sum(views)) as v from summing_url_views_cityHash64_site_id SAMPLE 0.5 GROUP BY site_id ORDER BY site_id ')->rowsAsTree('site_id'));
foreach ($rows as $event_date=>$data)
{
echo $event_date."\t".$data['v']."\t".intval(@$samp[$event_date]['v'])."\n";
}
for($f=1;$f<=9;$f++)
{
$SAMPLE=$f/10;
$CQL='select site_id,(sum(views)) as v from summing_url_views_cityHash64_site_id SAMPLE '.$SAMPLE.' WHERE site_id=34 GROUP BY site_id ORDER BY site_id ';
echo $CQL."\n";
$rows=($db->select('select site_id,sum(views) as v from summing_url_views_cityHash64_site_id WHERE site_id=34 GROUP BY site_id ORDER BY site_id')->rowsAsTree('site_id'));
$samp=($db->select($CQL)->rowsAsTree('site_id'));
foreach ($rows as $id=>$data)
{
$s=$samp[$id]['v'];
$v=$data['v'];
$percent=round( (100*$s) /$v ,2);
$kof=(100/$percent);
$norma_views=$s*(100/$percent);
echo "Сумма показов без SAMPLE = " .$v."\n";
echo "Сумма показов c SAMPLE = " .$s."\n";
echo "Процент = " .$percent."\n";
echo "На что домжнож.семлир.данн= " .$kof."\n";
echo "Сумма показов расчитанное = " .$norma_views."\n";
/// >> 1/(0.8) = для SAMPLE 0.8
/// >> 1/(0.5) = для SAMPLE 0.5
}
echo "\n\n";
}
/*
*/

View File

@ -0,0 +1,196 @@
<?php
include_once __DIR__ . '/../include.php';
include_once __DIR__ . '/Helper.php';
\ClickHouseDB\Example\Helper::init();
$config = include_once __DIR__ . '/00_config_connect.php';
$db = new ClickHouseDB\Client($config);
$_flag_create_table=false;
$db->write("DROP TABLE IF EXISTS summing_url_views_intHash32_site_id");
$size=$db->tableSize('summing_url_views_intHash32_site_id');
echo "Site table summing_url_views_intHash32_site_id : ".(isset($size['size'])?$size['size']:'false')."\n";
if (!isset($size['size'])) $_flag_create_table=true;
if ($_flag_create_table) {
$db->write("DROP TABLE IF EXISTS summing_url_views_intHash32_site_id");
$re=$db->write('
CREATE TABLE IF NOT EXISTS summing_url_views_intHash32_site_id (
event_date Date DEFAULT toDate(event_time),
event_time DateTime,
url_hash String,
site_id Int32,
views Int32,
v_00 Int32,
v_55 Int32
)
ENGINE = SummingMergeTree(event_date, intHash32(event_time,site_id),(site_id, url_hash, event_time, event_date,intHash32(event_time,site_id)), 8192)
');
echo "Table EXISTS:" . print_r($db->showTables()) . "\n";
// ------------------------------------------------------------------------------------------------------
echo "----------------------------------- CREATE big csv file -----------------------------------------------------------------\n";
$file_data_names = [
'/tmp/clickHouseDB_test.big.1.data',
'/tmp/clickHouseDB_test.big.2.data',
'/tmp/clickHouseDB_test.big.3.data',
];
$c = 0;
foreach ($file_data_names as $file_name) {
$c++;
$shift_days=( -1* $c*3);
\ClickHouseDB\Example\Helper::makeSomeDataFileBig($file_name, 4 * $c,$shift_days);
}
echo "----------------------------------------------------------------------------------------------------\n";
echo "insert ALL file async + GZIP:\n";
$db->enableHttpCompression(true);
$time_start = microtime(true);
$result_insert = $db->insertBatchFiles('summing_url_views_intHash32_site_id', $file_data_names, [
'event_time', 'url_hash', 'site_id', 'views', 'v_00', 'v_55'
]);
echo "use time:" . round(microtime(true) - $time_start, 2) . "\n";
foreach ($result_insert as $fileName => $state) {
echo "$fileName => " . json_encode($state->info_upload()) . "\n";
}
}
echo "------------------------------- COMPARE event_date ---------------------------------------------------------------------\n";
$rows=($db->select('select event_date,sum(views) as v from summing_url_views_intHash32_site_id GROUP BY event_date ORDER BY event_date')->rowsAsTree('event_date'));
$samp=($db->select('select event_date,sum(views) as v from summing_url_views_intHash32_site_id SAMPLE 0.5 GROUP BY event_date ORDER BY event_date ')->rowsAsTree('event_date'));
foreach ($rows as $event_date=>$data)
{
echo $event_date."\t".$data['v']."\t".(@$samp[$event_date]['v']*(1/0.5))."\n";
}
$rows=($db->select('select site_id,sum(views) as v from summing_url_views_intHash32_site_id GROUP BY site_id ORDER BY site_id')->rowsAsTree('site_id'));
$samp=($db->select('select site_id,(sum(views)) as v from summing_url_views_intHash32_site_id SAMPLE 0.5 GROUP BY site_id ORDER BY site_id ')->rowsAsTree('site_id'));
foreach ($rows as $event_date=>$data)
{
echo $event_date."\t".$data['v']."\t".intval(@$samp[$event_date]['v'])."\n";
}
/*
When we sample data by the intHash32(site_id) key and read it back with GROUP BY site_id,
the sum of views per site_id key stays exact, but only the specified fraction of keys appears in the result (a short estimation sketch follows after this comment block).
select site_id,(sum(views)) as v from summing_url_views_intHash32_site_id SAMPLE 0.1 GROUP BY site_id ORDER BY site_id
VS
select site_id,sum(views) as v from summing_url_views_intHash32_site_id GROUP BY site_id ORDER BY site_id
48 16560 0
47 16560 0
46 16560 16560
45 16560 0
44 16560 0
43 16560 0
42 16560 0
41 16560 0
40 16560 0
39 16560 0
38 16560 16560
37 16560 0
36 16560 16560
35 16560 0
34 16560 0
33 16560 16560
32 16560 0
31 16560 0
30 16560 0
29 16560 0
28 16560 0
27 16560 0
26 16560 0
25 16560 0
24 16560 0
23 16560 0
22 16560 0
21 16560 0
20 16560 16560
19 16560 0
18 16560 0
17 16560 0
16 16560 0
15 16560 0
14 16560 0
13 16560 0
12 16560 0
If SAMPLE is raised to 0.5 => 50% of the site_id keys are read
48 16560 0
47 16560 0
46 16560 16560
45 16560 16560
44 16560 16560
43 16560 0
42 16560 16560
41 16560 16560
40 16560 16560
39 16560 16560
38 16560 16560
37 16560 16560
36 16560 16560
35 16560 16560
34 16560 0
33 16560 16560
32 16560 16560
31 16560 16560
30 16560 16560
29 16560 0
28 16560 16560
27 16560 16560
26 16560 0
25 16560 0
24 16560 0
23 16560 0
22 16560 0
21 16560 16560
20 16560 16560
19 16560 16560
18 16560 16560
17 16560 16560
16 16560 0
15 16560 16560
14 16560 16560
13 16560 16560
12 16560 16560
*/
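// Hedged sketch (an illustration, not part of the original example): because SAMPLE by the
// intHash32(site_id) key keeps per-key sums exact while reading only a fraction of the keys,
// a rough overall total can be estimated by rescaling the sampled grand total with the ratio.
$ratio = 0.5;
$sampled_total = $db->select(
    'select sum(views) as v from summing_url_views_intHash32_site_id SAMPLE ' . $ratio
)->fetchOne('v');
echo "Estimated total views (sampled * 1/ratio): " . ($sampled_total * (1 / $ratio)) . "\n";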

View File

@ -0,0 +1,12 @@
<?php
include_once __DIR__ . '/../include.php';
$config = include_once __DIR__ . '/00_config_connect.php';
$db = new ClickHouseDB\Client($config);
$db->enableLogQueries()->enableHttpCompression();
//----------------------------------------
print_r($db->select('SELECT * FROM system.query_log')->rows());

View File

@ -0,0 +1,36 @@
<?php
include_once __DIR__ . '/../include.php';
$config = include_once __DIR__ . '/00_config_connect.php';
$db = new ClickHouseDB\Client($config);
$db->enableExtremes(true)->enableHttpCompression();
$db->setReadOnlyUser(true);
// exec
$db->showDatabases();
// ----------------------------
$db = new ClickHouseDB\Client($config);
//$db->enableLogQueries()->enableHttpCompression();
//----------------------------------------
//print_r($db->select('SELECT * FROM system.query_log')->rows());
//----------------------------------------
$db->enableExtremes(true)->enableHttpCompression();
$db->showDatabases();
echo "OK?\n";
// ---------

View File

@ -0,0 +1,138 @@
<?php
include_once __DIR__ . '/../include.php';
$config = include_once __DIR__ . '/00_config_connect.php';
$db = new ClickHouseDB\Client($config);
$db->enableExtremes(true)->enableHttpCompression();
$db->write("DROP TABLE IF EXISTS xxxx");
$db->write('
CREATE TABLE IF NOT EXISTS xxxx (
event_date Date,
url_hash String,
site_id Int32,
views Int32
)
ENGINE = SummingMergeTree(event_date, (site_id, url_hash), 8192)
');
// ARRAY TO TABLE
$rows=[
['2017-01-01','XXXXX',123,1],
['2017-01-02','XXXXX',123,1],
['2017-01-03','XXXXX',123,1],
['2017-01-04','XXXXX',123,1],
['2017-01-05','XXXXX',123,1],
['2017-01-06','XXXXX',123,1],
['2017-01-07','XXXXX',123,1]
];
// Write to file array
$temp_file_name='/tmp/_test_data.TSV';
if (file_exists($temp_file_name)) unlink($temp_file_name);
foreach ($rows as $row)
{
file_put_contents($temp_file_name,\ClickHouseDB\Quote\FormatLine::TSV($row)."\n",FILE_APPEND);
}
echo "CONTENT FILES:\n";
echo file_get_contents($temp_file_name);
echo "------\n";
//
$db->insertBatchTSVFiles('xxxx', [$temp_file_name], [
'event_date',
'url_hash',
'site_id',
'views'
]);
print_r($db->select('SELECT * FROM xxxx')->rows());
/**
CONTENT FILES:
2017-01-01 XXXXX 123 1
2017-01-02 XXXXX 123 1
2017-01-03 XXXXX 123 1
2017-01-04 XXXXX 123 1
2017-01-05 XXXXX 123 1
2017-01-06 XXXXX 123 1
2017-01-07 XXXXX 123 1
------
Array
(
[0] => Array
(
[event_date] => 2017-01-01
[url_hash] => XXXXX
[site_id] => 123
[views] => 1
)
[1] => Array
(
[event_date] => 2017-01-02
[url_hash] => XXXXX
[site_id] => 123
[views] => 1
)
[2] => Array
(
[event_date] => 2017-01-03
[url_hash] => XXXXX
[site_id] => 123
[views] => 1
)
[3] => Array
(
[event_date] => 2017-01-04
[url_hash] => XXXXX
[site_id] => 123
[views] => 1
)
[4] => Array
(
[event_date] => 2017-01-05
[url_hash] => XXXXX
[site_id] => 123
[views] => 1
)
[5] => Array
(
[event_date] => 2017-01-06
[url_hash] => XXXXX
[site_id] => 123
[views] => 1
)
[6] => Array
(
[event_date] => 2017-01-07
[url_hash] => XXXXX
[site_id] => 123
[views] => 1
)
)
*
*/

View File

@ -0,0 +1,39 @@
<?php
include_once __DIR__ . '/../include.php';
$config = include_once __DIR__ . '/00_config_connect.php';
$db = new ClickHouseDB\Client($config);
$db->verbose();
// ---------------------------------------- NO HTTPS ----------------------------------------
$db->select('SELECT 11');
// ---------------------------------------- ADD HTTPS ----------------------------------------
$db->https();
$db->select('SELECT 11');
// --------------------- $db->settings()->https(); --------------------------------
$db = new ClickHouseDB\Client($config);
$db->verbose();
$db->settings()->https();
$db->select('SELECT 11');
// --------------------- $config['https']=true; --------------------------------
$config['https']=true;
$db = new ClickHouseDB\Client($config);
$db->verbose();
$db->select('SELECT 11');

View File

@ -0,0 +1,35 @@
<?php
include_once __DIR__ . '/../include.php';
//
$config = include_once __DIR__ . '/00_config_connect.php';
//
class progress {
public static function printz($data)
{
echo "CALL CLASS: ".json_encode($data)."\n";
}
}
$db = new ClickHouseDB\Client($config);
// need for test
$db->settings()->set('max_block_size', 1);
// ---------------------------------------- ----------------------------------------
$db->progressFunction(function ($data) {
echo "CALL FUNCTION:".json_encode($data)."\n";
});
$st=$db->select('SELECT number,sleep(0.2) FROM system.numbers limit 5');
// ---------------------------------------- ----------------------------------------
$db->settings()->set('http_headers_progress_interval_ms', 15); // change interval
$db->progressFunction(['progress','printz']);
$st=$db->select('SELECT number,sleep(0.1) FROM system.numbers limit 5');

View File

@ -0,0 +1,100 @@
<?php
include_once __DIR__ . '/../include.php';
//
$config = include_once __DIR__ . '/00_config_connect.php';
echo "\nPrepare....\n";
$client = new ClickHouseDB\Client($config);
$client->write('DROP TABLE IF EXISTS _phpCh_SteamTest');
$client->write('CREATE TABLE _phpCh_SteamTest (a Int32) Engine=Log');
echo "\n\n------------------------------------ 0 ---------------------------------------------------------------------------------\n\n";
$stream = fopen('php://memory','r+');
for($f=0;$f<121123;$f++)
fwrite($stream, json_encode(['a'=>$f]).PHP_EOL );
rewind($stream);
echo "\nstreamWrite....\n";
$streamWrite=new ClickHouseDB\Transport\StreamWrite($stream);
$streamWrite->applyGzip();
$callable = function ($ch, $fd, $length) use ($stream) {
return ($line = fread($stream, $length)) ? $line : '';
};
$streamWrite->closure($callable);
$r=$client->streamWrite($streamWrite,'INSERT INTO {table_name} FORMAT JSONEachRow', ['table_name'=>'_phpCh_SteamTest']);
print_r($r->info_upload());
print_r($client->select("SELECT sum(a) as s FROM _phpCh_SteamTest ")->fetchOne('s'));
echo "\n\n------------------------------------ 1 ---------------------------------------------------------------------------------\n\n";
$stream = fopen('php://memory','r+');
$streamRead=new ClickHouseDB\Transport\StreamRead($stream);
$r=$client->streamRead($streamRead,'SELECT sin(number) as sin,cos(number) as cos FROM {table_name} LIMIT 4 FORMAT JSONEachRow', ['table_name'=>'system.numbers']);
rewind($stream);
while (($buffer = fgets($stream, 4096)) !== false) {
echo ">>> ".$buffer;
}
fclose($stream);
echo "\n\n---------------------------------- 2 --------------------------------------------------------------------------------------\n\n";
$stream = fopen('php://memory','r+');
$streamRead=new ClickHouseDB\Transport\StreamRead($stream);
$callable = function ($ch, $string) use ($stream) {
// some magic for _BLOCK_ data
fwrite($stream, str_ireplace('"sin"','"max"',$string));
return strlen($string);
};
$streamRead->closure($callable);
$r=$client->streamRead($streamRead,'SELECT sin(number) as sin,cos(number) as cos FROM {table_name} LIMIT 44 FORMAT JSONEachRow', ['table_name'=>'system.numbers']);
echo "size_download:".($r->info()['size_download'])."\n";
rewind($stream);
while (($buffer = fgets($stream, 4096)) !== false) {
echo "".$buffer;
}
fclose($stream);
// ------------------------------------------------------------------------------------------------------------------------

View File

@ -0,0 +1,33 @@
<?php
// Functions Quote CSV , TSV , Insert
include_once __DIR__ . '/src/Quote/StrictQuoteLine.php';
include_once __DIR__ . '/src/Quote/FormatLine.php';
include_once __DIR__ . '/src/Quote/CSV.php';
// Exception
include_once __DIR__ . '/src/Exception/ClickHouseException.php';
include_once __DIR__ . '/src/Exception/QueryException.php';
include_once __DIR__ . '/src/Exception/DatabaseException.php';
include_once __DIR__ . '/src/Exception/TransportException.php';
// Client
include_once __DIR__ . '/src/Statement.php';
include_once __DIR__ . '/src/Client.php';
include_once __DIR__ . '/src/Settings.php';
include_once __DIR__ . '/src/Cluster.php';
// Query
include_once __DIR__ . '/src/Query/Degeneration.php';
include_once __DIR__ . '/src/Query/Degeneration/Bindings.php';
include_once __DIR__ . '/src/Query/Degeneration/Conditions.php';
include_once __DIR__ . '/src/Query/WriteToFile.php';
include_once __DIR__ . '/src/Query/WhereInFile.php';
include_once __DIR__ . '/src/Query/Query.php';
// Transport
include_once __DIR__ . '/src/Transport/Http.php';
include_once __DIR__ . '/src/Transport/CurlerRolling.php';
include_once __DIR__ . '/src/Transport/CurlerRequest.php';
include_once __DIR__ . '/src/Transport/CurlerResponse.php';
include_once __DIR__ . '/src/Transport/IStream.php';
include_once __DIR__ . '/src/Transport/Stream.php';
include_once __DIR__ . '/src/Transport/StreamRead.php';
include_once __DIR__ . '/src/Transport/StreamWrite.php';
include_once __DIR__ . '/src/Transport/StreamInsert.php';

View File

@ -0,0 +1,22 @@
<?xml version="1.0" encoding="utf-8"?>
<ruleset>
<arg name="basepath" value="."/>
<arg name="extensions" value="php"/>
<arg name="colors"/>
<arg name="parallel" value="80"/>
<arg name="cache" value=".phpcs-cache"/>
<!-- Ignore warnings, show progress of the run and show sniff names -->
<arg value="nps"/>
<file>src</file>
<rule ref="Doctrine"/>
<rule ref="SlevomatCodingStandard.Namespaces.AlphabeticallySortedUses">
<properties>
<property name="psr12Compatible" type="bool" value="true"/>
</properties>
</rule>
</ruleset>

View File

@ -0,0 +1,6 @@
parameters:
level: 1
paths:
- %currentWorkingDirectory%/src
- %currentWorkingDirectory%/tests

View File

@ -0,0 +1,38 @@
<?xml version="1.0" encoding="UTF-8"?>
<phpunit xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:noNamespaceSchemaLocation="vendor/phpunit/phpunit/phpunit.xsd"
beStrictAboutChangesToGlobalState="true"
beStrictAboutOutputDuringTests="true"
beStrictAboutTodoAnnotatedTests="true"
colors="true"
verbose="true"
>
<testsuite name="Diff">
<directory>tests</directory>
</testsuite>
<filter>
<whitelist>
<directory suffix=".php">src</directory>
</whitelist>
</filter>
<php>
<env name="CLICKHOUSE_HOST" value="127.0.0.1" />
<env name="CLICKHOUSE_PORT" value="8123" />
<env name="CLICKHOUSE_USER" value="default" />
<env name="CLICKHOUSE_PASSWORD" value="" />
<env name="CLICKHOUSE_DATABASE" value="php_clickhouse" />
<env name="CLICKHOUSE_TMPPATH" value="/tmp" />
</php>
<logging>
<log
showOnlySummary="true"
showUncoveredFiles="true"
target="php://stdout"
type="coverage-text"
/>
<log type="coverage-clover" target="temp/clover.xml"/>
</logging>
</phpunit>

View File

@ -0,0 +1,891 @@
<?php
declare(strict_types=1);
namespace ClickHouseDB;
use ClickHouseDB\Exception\QueryException;
use ClickHouseDB\Query\Degeneration;
use ClickHouseDB\Query\Degeneration\Bindings;
use ClickHouseDB\Query\Degeneration\Conditions;
use ClickHouseDB\Query\WhereInFile;
use ClickHouseDB\Query\WriteToFile;
use ClickHouseDB\Quote\FormatLine;
use ClickHouseDB\Transport\Http;
use ClickHouseDB\Transport\Stream;
use function array_flip;
use function array_keys;
use function array_rand;
use function array_values;
use function count;
use function date;
use function implode;
use function in_array;
use function is_array;
use function is_callable;
use function is_file;
use function is_readable;
use function is_string;
use function sprintf;
use function stripos;
use function strtotime;
use function trim;
class Client
{
const SUPPORTED_FORMATS = ['TabSeparated', 'TabSeparatedWithNames', 'CSV', 'CSVWithNames', 'JSONEachRow'];
/** @var Http */
private $transport;
/** @var string */
private $connectUsername;
/** @var string */
private $connectPassword;
/** @var string */
private $connectHost;
/** @var string */
private $connectPort;
/** @var bool */
private $connectUserReadonly = false;
/**
* @param mixed[] $connectParams
* @param mixed[] $settings
*/
public function __construct(array $connectParams, array $settings = [])
{
if (! isset($connectParams['username'])) {
throw new \InvalidArgumentException('not set username');
}
if (! isset($connectParams['password'])) {
throw new \InvalidArgumentException('not set password');
}
if (! isset($connectParams['port'])) {
throw new \InvalidArgumentException('not set port');
}
if (! isset($connectParams['host'])) {
throw new \InvalidArgumentException('not set host');
}
$this->connectUsername = $connectParams['username'];
$this->connectPassword = $connectParams['password'];
$this->connectPort = $connectParams['port'];
$this->connectHost = $connectParams['host'];
// init transport class
$this->transport = new Http(
$this->connectHost,
$this->connectPort,
$this->connectUsername,
$this->connectPassword
);
$this->transport->addQueryDegeneration(new Bindings());
// apply settings to transport class
$this->settings()->database('default');
if (! empty($settings)) {
$this->settings()->apply($settings);
}
if (isset($connectParams['readonly'])) {
$this->setReadOnlyUser($connectParams['readonly']);
}
if (isset($connectParams['https'])) {
$this->https($connectParams['https']);
}
$this->enableHttpCompression();
}
/**
* if the user has only read in the config file
*/
public function setReadOnlyUser(bool $flag)
{
$this->connectUserReadonly = $flag;
$this->settings()->setReadOnlyUser($this->connectUserReadonly);
}
/**
* Clear Degeneration processing request [template ]
*
* @return bool
*/
public function cleanQueryDegeneration()
{
return $this->transport->cleanQueryDegeneration();
}
/**
* add Degeneration processing
*
* @return bool
*/
public function addQueryDegeneration(Degeneration $degeneration)
{
return $this->transport->addQueryDegeneration($degeneration);
}
/**
* add Conditions in query
*
* @return bool
*/
public function enableQueryConditions()
{
return $this->transport->addQueryDegeneration(new Conditions());
}
/**
* Set connection host
*
* @param string|string[] $host
*/
public function setHost($host)
{
if (is_array($host)) {
$host = array_rand(array_flip($host));
}
$this->connectHost = $host;
$this->transport()->setHost($host);
}
/**
* @return Settings
*/
public function setTimeout(float $timeout)
{
return $this->settings()->max_execution_time($timeout);
}
/**
* @return mixed
*/
public function getTimeout()
{
return $this->settings()->getTimeOut();
}
/**
* ConnectTimeOut in seconds ( support 1.5 = 1500ms )
*/
public function setConnectTimeOut(float $connectTimeOut)
{
$this->transport()->setConnectTimeOut($connectTimeOut);
}
/**
* @return int
*/
public function getConnectTimeOut()
{
return $this->transport()->getConnectTimeOut();
}
/**
* @return Http
*/
public function transport()
{
if (! $this->transport) {
throw new \InvalidArgumentException('Empty transport class');
}
return $this->transport;
}
/**
* @return string
*/
public function getConnectHost()
{
return $this->connectHost;
}
/**
* @return string
*/
public function getConnectPassword()
{
return $this->connectPassword;
}
/**
* @return string
*/
public function getConnectPort()
{
return $this->connectPort;
}
/**
* @return string
*/
public function getConnectUsername()
{
return $this->connectUsername;
}
/**
* @return Http
*/
public function getTransport()
{
return $this->transport;
}
/**
* @return mixed
*/
public function verbose()
{
return $this->transport()->verbose(true);
}
/**
* @return Settings
*/
public function settings()
{
return $this->transport()->settings();
}
/**
* @return static
*/
public function useSession(bool $useSessionId = false)
{
if (! $this->settings()->getSessionId()) {
if (! $useSessionId) {
$this->settings()->makeSessionId();
} else {
$this->settings()->session_id($useSessionId);
}
}
return $this;
}
/**
* @return mixed
*/
public function getSession()
{
return $this->settings()->getSessionId();
}
/**
* Query CREATE/DROP
*
* @param mixed[] $bindings
* @return Statement
*/
public function write(string $sql, array $bindings = [], bool $exception = true)
{
return $this->transport()->write($sql, $bindings, $exception);
}
/**
* set db name
* @return static
*/
public function database(string $db)
{
$this->settings()->database($db);
return $this;
}
/**
* Write to system.query_log
*
* @return static
*/
public function enableLogQueries(bool $flag = true)
{
$this->settings()->set('log_queries', (int) $flag);
return $this;
}
/**
* Compress the result if the HTTP client said that it understands data compressed with gzip or deflate
*
* @return static
*/
public function enableHttpCompression(bool $flag = true)
{
$this->settings()->enableHttpCompression($flag);
return $this;
}
/**
* Enable / Disable HTTPS
*
* @return static
*/
public function https(bool $flag = true)
{
$this->settings()->https($flag);
return $this;
}
/**
* Read extremes of the result columns. They can be output in JSON-formats.
*
* @return static
*/
public function enableExtremes(bool $flag = true)
{
$this->settings()->set('extremes', (int) $flag);
return $this;
}
/**
* @param mixed[] $bindings
* @return Statement
*/
public function select(
string $sql,
array $bindings = [],
WhereInFile $whereInFile = null,
WriteToFile $writeToFile = null
) {
return $this->transport()->select($sql, $bindings, $whereInFile, $writeToFile);
}
/**
* @return bool
*/
public function executeAsync()
{
return $this->transport()->executeAsync();
}
/**
* set progressFunction
*/
public function progressFunction(callable $callback)
{
if (! is_callable($callback)) {
throw new \InvalidArgumentException('Not is_callable progressFunction');
}
if (! $this->settings()->is('send_progress_in_http_headers')) {
$this->settings()->set('send_progress_in_http_headers', 1);
}
if (! $this->settings()->is('http_headers_progress_interval_ms')) {
$this->settings()->set('http_headers_progress_interval_ms', 100);
}
$this->transport()->setProgressFunction($callback);
}
/**
* prepare select
*
* @param mixed[] $bindings
* @return Statement
*/
public function selectAsync(
string $sql,
array $bindings = [],
WhereInFile $whereInFile = null,
WriteToFile $writeToFile = null
) {
return $this->transport()->selectAsync($sql, $bindings, $whereInFile, $writeToFile);
}
/**
* SHOW PROCESSLIST
*
* @return array
*/
public function showProcesslist()
{
return $this->select('SHOW PROCESSLIST')->rows();
}
/**
* show databases
*
* @return array
*/
public function showDatabases()
{
return $this->select('show databases')->rows();
}
/**
* statement = SHOW CREATE TABLE
*
* @return mixed
*/
public function showCreateTable(string $table)
{
return $this->select('SHOW CREATE TABLE ' . $table)->fetchOne('statement');
}
/**
* SHOW TABLES
*
* @return mixed[]
*/
public function showTables()
{
return $this->select('SHOW TABLES')->rowsAsTree('name');
}
/**
* Get the number of simultaneous/Pending requests
*
* @return int
*/
public function getCountPendingQueue()
{
return $this->transport()->getCountPendingQueue();
}
/**
* @param mixed[][] $values
* @param string[] $columns
*/
public function insert(string $table, array $values, array $columns = []) : Statement
{
if (empty($values)) {
throw QueryException::cannotInsertEmptyValues();
}
if (stripos($table, '`') === false && stripos($table, '.') === false) {
$table = '`' . $table . '`'; //quote table name for dot names
}
$sql = 'INSERT INTO ' . $table;
if (count($columns) !== 0) {
$sql .= ' (`' . implode('`,`', $columns) . '`) ';
}
$sql .= ' VALUES ';
foreach ($values as $row) {
$sql .= ' (' . FormatLine::Insert($row) . '), ';
}
$sql = trim($sql, ', ');
return $this->transport()->write($sql);
}
/**
* Prepares the values to insert from the associative array.
* There may be one or more rows to insert, but the keys inside each row array must match (including their order).
*
* @param mixed[] $values - array column_name => value (if we insert one row) or array list column_name => value if we insert many lines
* @return mixed[][] - list of arrays - 0 => fields, 1 => list of value arrays for insertion
*/
public function prepareInsertAssocBulk(array $values)
{
if (isset($values[0]) && is_array($values[0])) { // case when multiple rows are being inserted
$preparedFields = array_keys($values[0]);
$preparedValues = [];
foreach ($values as $idx => $row) {
$_fields = array_keys($row);
if ($_fields !== $preparedFields) {
throw new QueryException(
sprintf(
'Fields not match: %s and %s on element %s',
implode(',', $_fields),
implode(',', $preparedFields),
$idx
)
);
}
$preparedValues[] = array_values($row);
}
} else {
$preparedFields = array_keys($values);
$preparedValues = [array_values($values)];
}
return [$preparedFields, $preparedValues];
}
/**
* Inserts one or more rows from an associative array.
* If there is a discrepancy between the keys of the value arrays (or their order) - throws an exception.
*
* @param mixed[] $values - array column_name => value (if we insert one row) or array list column_name => value if we insert many lines
* @return Statement
*/
public function insertAssocBulk(string $tableName, array $values)
{
list($columns, $vals) = $this->prepareInsertAssocBulk($values);
return $this->insert($tableName, $vals, $columns);
}
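// Hedged usage sketch (an illustration, not part of the original source): insertAssocBulk()
// accepts either a single associative row or a list of rows whose keys (and key order) match,
// and forwards them to insert() as a column list plus value rows, e.g.:
//   $db->insertAssocBulk('arrays_test', [
//       ['s_key' => 'HASH1', 's_arr' => [11, 22, 33]],
//       ['s_key' => 'HASH2', 's_arr' => [11, 22, 55]],
//   ]);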
/**
* insert TabSeparated files
*
* @param string|string[] $fileNames
* @param string[] $columns
* @return mixed
*/
public function insertBatchTSVFiles(string $tableName, $fileNames, array $columns = [])
{
return $this->insertBatchFiles($tableName, $fileNames, $columns, 'TabSeparated');
}
/**
* insert Batch Files
*
* @param string|string[] $fileNames
* @param string[] $columns
* @param string $format ['TabSeparated','TabSeparatedWithNames','CSV','CSVWithNames']
* @return Statement[]
* @throws Exception\TransportException
*/
public function insertBatchFiles(string $tableName, $fileNames, array $columns = [], string $format = 'CSV')
{
if (is_string($fileNames)) {
$fileNames = [$fileNames];
}
if ($this->getCountPendingQueue() > 0) {
throw new QueryException('Queue must be empty, before insertBatch, need executeAsync');
}
if (! in_array($format, self::SUPPORTED_FORMATS, true)) {
throw new QueryException('Format not support in insertBatchFiles');
}
$result = [];
foreach ($fileNames as $fileName) {
if (! is_file($fileName) || ! is_readable($fileName)) {
throw new QueryException('Cant read file: ' . $fileName . ' ' . (is_file($fileName) ? '' : ' is not file'));
}
if (empty($columns)) {
$sql = 'INSERT INTO ' . $tableName . ' FORMAT ' . $format;
} else {
$sql = 'INSERT INTO ' . $tableName . ' ( ' . implode(',', $columns) . ' ) FORMAT ' . $format;
}
$result[$fileName] = $this->transport()->writeAsyncCSV($sql, $fileName);
}
// exec
$this->executeAsync();
// fetch result
foreach ($fileNames as $fileName) {
if (! $result[$fileName]->isError()) {
continue;
}
$result[$fileName]->error();
}
return $result;
}
/**
* insert Batch Stream
*
* @param string[] $columns
* @param string $format ['TabSeparated','TabSeparatedWithNames','CSV','CSVWithNames']
* @return Transport\CurlerRequest
*/
public function insertBatchStream(string $tableName, array $columns = [], string $format = 'CSV')
{
if ($this->getCountPendingQueue() > 0) {
throw new QueryException('Queue must be empty, before insertBatch, need executeAsync');
}
if (! in_array($format, self::SUPPORTED_FORMATS, true)) {
throw new QueryException('Format not support in insertBatchFiles');
}
if (empty($columns)) {
$sql = 'INSERT INTO ' . $tableName . ' FORMAT ' . $format;
} else {
$sql = 'INSERT INTO ' . $tableName . ' ( ' . implode(',', $columns) . ' ) FORMAT ' . $format;
}
return $this->transport()->writeStreamData($sql);
}
/**
* stream Write
*
* @param string[] $bind
* @return Statement
* @throws Exception\TransportException
*/
public function streamWrite(Stream $stream, string $sql, array $bind = [])
{
if ($this->getCountPendingQueue() > 0) {
throw new QueryException('Queue must be empty, before streamWrite');
}
return $this->transport()->streamWrite($stream, $sql, $bind);
}
/**
* stream Read
*
* @param string[] $bind
* @return Statement
*/
public function streamRead(Stream $streamRead, string $sql, array $bind = [])
{
if ($this->getCountPendingQueue() > 0) {
throw new QueryException('Queue must be empty before streamRead');
}
return $this->transport()->streamRead($streamRead, $sql, $bind);
}
/**
* Size of database
*
* @return mixed|null
*/
public function databaseSize()
{
$b = $this->settings()->getDatabase();
return $this->select(
'
SELECT database,formatReadableSize(sum(bytes)) as size
FROM system.parts
WHERE active AND database=:database
GROUP BY database
',
['database' => $b]
)->fetchOne();
}
/**
* Size of tables
*
* @return mixed
*/
public function tableSize(string $tableName)
{
$tables = $this->tablesSize();
if (isset($tables[$tableName])) {
return $tables[$tableName];
}
return null;
}
/**
* Ping server
*
* @return bool
*/
public function ping()
{
return $this->transport()->ping();
}
/**
* Tables sizes
*
* @param bool $flatList
* @return mixed[][]
*/
public function tablesSize($flatList = false)
{
$result = $this->select('
SELECT name as table,database,
max(sizebytes) as sizebytes,
max(size) as size,
min(min_date) as min_date,
max(max_date) as max_date
FROM system.tables
ANY LEFT JOIN
(
SELECT table,database,
formatReadableSize(sum(bytes)) as size,
sum(bytes) as sizebytes,
min(min_date) as min_date,
max(max_date) as max_date
FROM system.parts
WHERE active AND database=:database
GROUP BY table,database
) USING ( table,database )
WHERE database=:database
GROUP BY table,database
',
['database' => $this->settings()->getDatabase()]);
if ($flatList) {
return $result->rows();
}
return $result->rowsAsTree('table');
}
/**
* isExists
*
* @return array
*/
public function isExists(string $database, string $table)
{
return $this->select(
'
SELECT *
FROM system.tables
WHERE name=\'' . $table . '\' AND database=\'' . $database . '\''
)->rowsAsTree('name');
}
/**
* List of partitions
*
* @return mixed[][]
*/
public function partitions(string $table, int $limit = null, bool $active = null)
{
$database = $this->settings()->getDatabase();
$whereActiveClause = $active === null ? '' : sprintf(' AND active = %s', (int) $active);
$limitClause = $limit !== null ? ' LIMIT ' . $limit : '';
return $this->select(<<<CLICKHOUSE
SELECT *
FROM system.parts
WHERE like(table,'%$table%') AND database='$database'$whereActiveClause
ORDER BY max_date $limitClause
CLICKHOUSE
)->rowsAsTree('name');
}
/**
* dropPartition
* @deprecated
* @return Statement
*/
public function dropPartition(string $dataBaseTableName, string $partition_id)
{
$partition_id = trim($partition_id, '\'');
$this->settings()->set('replication_alter_partitions_sync', 2);
$state = $this->write('ALTER TABLE {dataBaseTableName} DROP PARTITION :partition_id',
[
'dataBaseTableName' => $dataBaseTableName,
'partition_id' => $partition_id,
]);
return $state;
}
/**
* Truncate ( drop all partitions )
* @deprecated
* @return array
*/
public function truncateTable(string $tableName)
{
$partitions = $this->partitions($tableName);
$out = [];
foreach ($partitions as $part) {
$part_id = $part['partition'];
$out[$part_id] = $this->dropPartition($tableName, $part_id);
}
return $out;
}
/**
* Returns the server's uptime in seconds.
*
* @return int
* @throws Exception\TransportException
*/
public function getServerUptime()
{
return $this->select('SELECT uptime() as uptime')->fetchOne('uptime');
}
/**
* Returns string with the server version.
*/
public function getServerVersion() : string
{
return (string) $this->select('SELECT version() as version')->fetchOne('version');
}
/**
* Read system.settings table
*
* @return mixed[][]
*/
public function getServerSystemSettings(string $like = '')
{
$l = [];
$list = $this->select('SELECT * FROM system.settings' . ($like ? ' WHERE name LIKE :like' : ''),
['like' => '%' . $like . '%'])->rows();
foreach ($list as $row) {
if (isset($row['name'])) {
$n = $row['name'];
unset($row['name']);
$l[$n] = $row;
}
}
return $l;
}
/**
 * Drop partitions whose max_date is older than the given number of days.
 * @deprecated
 *
 * @return array
 * @throws Exception\TransportException
 * @throws \Exception
 */
public function dropOldPartitions(string $table_name, int $days_ago, int $count_partitons_per_one = 100)
{
$days_ago = strtotime(date('Y-m-d 00:00:00', strtotime('-' . $days_ago . ' day')));
$drop = [];
$list_partitions = $this->partitions($table_name, $count_partitons_per_one);
foreach ($list_partitions as $partition) {
if (stripos($partition['engine'], 'mergetree') === false) {
continue;
}
// $min_date = strtotime($partition['min_date']);
$max_date = strtotime($partition['max_date']);
if ($max_date < $days_ago) {
$drop[] = $partition['partition'];
}
}
$result = [];
foreach ($drop as $partition_id) {
$result[$partition_id] = $this->dropPartition($table_name, $partition_id);
}
return $result;
}
}
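For orientation, a minimal usage sketch of the Client helpers above (not part of the committed library; connection parameters, table and column names are placeholders and assume a reachable ClickHouse server):
// Illustrative sketch only; table `summing_url_views` and its columns are assumed to exist.
$db = new ClickHouseDB\Client(['host' => '127.0.0.1', 'port' => 8123, 'username' => 'default', 'password' => '']);
$db->database('default');
// Bulk insert from associative arrays (keys must match across rows).
$db->insertAssocBulk('summing_url_views', [
    ['event_time' => date('Y-m-d H:i:s'), 'site_id' => 1, 'views' => 10],
    ['event_time' => date('Y-m-d H:i:s'), 'site_id' => 2, 'views' => 20],
]);
// Load prepared CSV files in one asynchronous batch.
$db->insertBatchFiles('summing_url_views', ['/tmp/part1.csv', '/tmp/part2.csv'], ['event_time', 'site_id', 'views']);
// Introspection helpers defined above.
echo $db->getServerVersion();
print_r($db->tablesSize(true));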

View File

@ -0,0 +1,626 @@
<?php
namespace ClickHouseDB;
use ClickHouseDB\Exception\QueryException;
class Cluster
{
/**
* @var array
*/
private $nodes = [];
/**
* @var Client[]
*/
private $clients = [];
/**
* @var Client
*/
private $defaultClient;
/**
* @var array
*/
private $badNodes = [];
/**
* @var array|bool
*/
private $error = [];
/**
* @var array
*/
private $resultScan = [];
/**
* @var string
*/
private $defaultHostName;
/**
* @var int|float
*/
private $scanTimeOut = 10;
/**
* @var array
*/
private $tables = [];
/**
* @var array
*/
private $hostsnames = [];
/**
* @var bool
*/
private $isScaned = false;
/**
 * Soft-check flag: when true, the replica health check skips the heavier
 * ZooKeeper-backed columns (active_replicas, queue_size, log_max_index);
 * when false those checks are evaluated as well.
 *
 * @var bool
 */
/**
* @var bool
*/
private $replicasIsOk;
/**
* Cache
*
* @var array
*/
private $_table_size_cache = [];
/**
* Cluster constructor.
*
* @param array $connect_params
* @param array $settings
*/
public function __construct($connect_params, $settings = [])
{
$this->defaultClient = new Client($connect_params, $settings);
$this->defaultHostName = $this->defaultClient->getConnectHost();
$this->setNodes(gethostbynamel($this->defaultHostName));
}
/**
* @return Client
*/
private function defaultClient()
{
return $this->defaultClient;
}
/**
* @param bool $softCheck
*/
public function setSoftCheck($softCheck)
{
$this->softCheck = $softCheck;
}
/**
* @param float|integer $scanTimeOut
*/
public function setScanTimeOut($scanTimeOut)
{
$this->scanTimeOut = $scanTimeOut;
}
/**
* @param array $nodes
*/
public function setNodes($nodes)
{
$this->nodes = $nodes;
}
/**
* @return array
*/
public function getNodes()
{
return $this->nodes;
}
/**
* @return array
*/
public function getBadNodes()
{
return $this->badNodes;
}
/**
* Connect all nodes and scan
*
* @return $this
* @throws Exception\TransportException
*/
public function connect()
{
if (!$this->isScaned) {
$this->rescan();
}
return $this;
}
/**
 * Check the cluster status; the checks follow the ClickHouse documentation.
 * Note: a total_replicas < 2 check is not suitable for clusters without replication.
 *
 * @param mixed $replicas
 * @return bool
 */
private function isReplicasWork($replicas)
{
$ok = true;
if (!is_array($replicas)) {
// @todo no array is treated as an error here, since we are working with replicas?
// @todo how to handle a cluster that has NO replicas at all?
return false;
}
foreach ($replicas as $replica) {
if ($replica['is_readonly']) {
$ok = false;
$this->error[] = 'is_readonly : ' . json_encode($replica);
}
if ($replica['is_session_expired']) {
$ok = false;
$this->error[] = 'is_session_expired : ' . json_encode($replica);
}
if ($replica['future_parts'] > 20) {
$ok = false;
$this->error[] = 'future_parts : ' . json_encode($replica);
}
if ($replica['parts_to_check'] > 10) {
$ok = false;
$this->error[] = 'parts_to_check : ' . json_encode($replica);
}
// @todo : rework the total_replicas=1 case for clusters without replicas; need to check which cluster it is and how many replicas it has
// if ($replica['total_replicas']<2) {$ok=false;$this->error[]='total_replicas : '.json_encode($replica);}
if ($this->softCheck)
{
if (!$ok) {
break;
}
continue;
}
if ($replica['active_replicas'] < $replica['total_replicas']) {
$ok = false;
$this->error[] = 'active_replicas : ' . json_encode($replica);
}
if ($replica['queue_size'] > 20) {
$ok = false;
$this->error[] = 'queue_size : ' . json_encode($replica);
}
if (($replica['log_max_index'] - $replica['log_pointer']) > 10) {
$ok = false;
$this->error[] = 'log_max_index : ' . json_encode($replica);
}
if (!$ok) {
break;
}
}
return $ok;
}
private function getSelectSystemReplicas()
{
// If all columns are queried the table can be somewhat slow, since it does several ZooKeeper reads per row.
// If the last 4 columns (log_max_index, log_pointer, total_replicas, active_replicas) are not queried, the table is fast.
return 'SELECT
database,table,engine,is_leader,is_readonly,
is_session_expired,future_parts,parts_to_check,zookeeper_path,replica_name,replica_path,columns_version,
queue_size,inserts_in_queue,merges_in_queue,queue_oldest_time,inserts_oldest_time,merges_oldest_time
FROM system.replicas
';
// return 'SELECT * FROM system.replicas';
}
/**
* @return $this
* @throws Exception\TransportException
*/
public function rescan()
{
$this->error = [];
/*
 * 1) Get the IP list
 * 2) Connect to each IP via activeClient, replacing the host with the IP
 * 3) Fetch system.clusters + system.replicas from each machine, overriding {DnsCache + timeOuts}
 * 4) Determine the machines required for the cluster / replica
 * 5) .... ?
 */
$statementsReplicas = [];
$statementsClusters = [];
$result = [];
$badNodes = [];
$replicasIsOk = true;
foreach ($this->nodes as $node) {
$this->defaultClient()->setHost($node);
$statementsReplicas[$node] = $this->defaultClient()->selectAsync($this->getSelectSystemReplicas());
$statementsClusters[$node] = $this->defaultClient()->selectAsync('SELECT * FROM system.clusters');
// reset the DNS cache and apply the scan timeouts
$statementsReplicas[$node]->getRequest()->setDnsCache(0)->timeOut($this->scanTimeOut)->connectTimeOut($this->scanTimeOut);
$statementsClusters[$node]->getRequest()->setDnsCache(0)->timeOut($this->scanTimeOut)->connectTimeOut($this->scanTimeOut);
}
$this->defaultClient()->executeAsync();
$tables = [];
foreach ($this->nodes as $node) {
try {
$r = $statementsReplicas[$node]->rows();
foreach ($r as $row) {
$tables[$row['database']][$row['table']][$node] = $row;
}
$result['replicas'][$node] = $r;
}catch (\Exception $E) {
$result['replicas'][$node] = false;
$badNodes[$node] = $E->getMessage();
$this->error[] = 'statementsReplicas:' . $E->getMessage();
}
// ---------------------------------------------------------------------------------------------------
$hosts = [];
try {
$c = $statementsClusters[$node]->rows();
$result['clusters'][$node] = $c;
foreach ($c as $row) {
$hosts[$row['host_address']][$row['port']] = $row['host_name'];
$result['cluster.list'][$row['cluster']][$row['host_address']] =
[
'shard_weight' => $row['shard_weight'],
'replica_num' => $row['replica_num'],
'shard_num' => $row['shard_num'],
'is_local' => $row['is_local']
];
}
}catch (\Exception $E) {
$result['clusters'][$node] = false;
$this->error[] = 'clusters:' . $E->getMessage();
$badNodes[$node] = $E->getMessage();
}
$this->hostsnames = $hosts;
$this->tables = $tables;
// ---------------------------------------------------------------------------------------------------
// Let's check that replication goes well
$rIsOk = $this->isReplicasWork($result['replicas'][$node]);
$result['replicasIsOk'][$node] = $rIsOk;
if (!$rIsOk) {
$replicasIsOk = false;
}
// ---------------------------------------------------------------------------------------------------
}
// example: badNodes = ['222.222.222.44' => 'HttpCode:0 ; ', '222.222.222.11' => 'HttpCode:0 ; ', ...]
$this->badNodes = $badNodes;
// Restore DNS host name on ch_client
$this->defaultClient()->setHost($this->defaultHostName);
$this->isScaned = true;
$this->replicasIsOk = $replicasIsOk;
if (!$replicasIsOk) {
$this->error[] = "Bad replicasIsOk, in " . json_encode($result['replicasIsOk']);
}
// ------------------------------------------------
// @todo : verify this on real production failures and with heterogeneous configurations ...
if (sizeof($this->badNodes)) {
$this->error[] = 'Have bad node : ' . json_encode($this->badNodes);
$this->replicasIsOk = false;
}
if (!sizeof($this->error)) {
$this->error = false;
}
$this->resultScan = $result;
// @todo : we connect to every host in the DNS list; need to verify that responses came back from all hosts we connected to
return $this;
}
/**
* @return boolean
* @throws Exception\TransportException
*/
public function isReplicasIsOk()
{
return $this->connect()->replicasIsOk;
}
/**
* @param string $node
* @return Client
*/
public function client($node)
{
// Create a separate client for each IP
if (empty($this->clients[$node])) {
$this->clients[$node] = clone $this->defaultClient();
}
$this->clients[$node]->setHost($node);
return $this->clients[$node];
}
/**
 * @param string $cluster
 * @param string $ip_addr_like semicolon-separated list of IP prefixes to prefer
 * @return Client
 * @throws Exception\TransportException
 */
public function clientLike($cluster, $ip_addr_like)
{
$nodes_check = $this->nodes;
$nodes = $this->getClusterNodes($cluster);
$list_ips_need = explode(';', $ip_addr_like);
$find = false;
foreach ($list_ips_need as $like)
{
foreach ($nodes as $node)
{
if (stripos($node, $like) !== false)
{
if (in_array($node, $nodes_check))
{
$find = $node;
} else
{
// node exists in the cluster, but is not among the scanned nodes
}
}
if ($find) {
break;
}
}
if ($find) {
break;
}
}
if (!$find) {
$find = $nodes[0];
}
return $this->client($find);
}
/**
* @return Client
*/
public function activeClient()
{
return $this->client($this->nodes[0]);
}
/**
* @param string $cluster
* @return int
* @throws Exception\TransportException
*/
public function getClusterCountShard($cluster)
{
$table = $this->getClusterInfoTable($cluster);
$c = [];
foreach ($table as $row) {
$c[$row['shard_num']] = 1;
}
return sizeof($c);
}
/**
* @param string $cluster
* @return int
* @throws Exception\TransportException
*/
public function getClusterCountReplica($cluster)
{
$table = $this->getClusterInfoTable($cluster);
$c = [];
foreach ($table as $row) {
$c[$row['replica_num']] = 1;
}
return sizeof($c);
}
/**
* @param string $cluster
* @return mixed
* @throws Exception\TransportException
*/
public function getClusterInfoTable($cluster)
{
$this->connect();
if (empty($this->resultScan['cluster.list'][$cluster])) {
throw new QueryException('Cluster not found: ' . $cluster);
}
return $this->resultScan['cluster.list'][$cluster];
}
/**
* @param string $cluster
* @return array
* @throws Exception\TransportException
*/
public function getClusterNodes($cluster)
{
return array_keys($this->getClusterInfoTable($cluster));
}
/**
* @return array
* @throws Exception\TransportException
*/
public function getClusterList()
{
$this->connect();
return array_keys($this->resultScan['cluster.list']);
}
/**
 * List all tables on all nodes
 *
 * @param bool $resultDetail
 * @return array
 * @throws Exception\TransportException
 */
public function getTables($resultDetail = false)
{
$this->connect();
$list = [];
foreach ($this->tables as $db_name=>$tables)
{
foreach ($tables as $table_name=>$nodes)
{
if ($resultDetail)
{
$list[$db_name . '.' . $table_name] = $nodes;
} else
{
$list[$db_name . '.' . $table_name] = array_keys($nodes);
}
}
}
return $list;
}
/**
* Table size on cluster
*
* @param string $database_table
* @return array|null
*
* @throws Exception\TransportException
*/
public function getSizeTable($database_table)
{
$nodes = $this->getNodesByTable($database_table);
// scan the needed nodes
foreach ($nodes as $node)
{
if (empty($this->_table_size_cache[$node]))
{
$this->_table_size_cache[$node] = $this->client($node)->tablesSize(true);
}
}
$sizes = [];
foreach ($this->_table_size_cache as $node=>$rows)
{
foreach ($rows as $row)
{
$sizes[$row['database'] . '.' . $row['table']][$node] = $row;
@$sizes[$row['database'] . '.' . $row['table']]['total']['sizebytes'] += $row['sizebytes'];
}
}
if (empty($sizes[$database_table]))
{
return null;
}
return $sizes[$database_table]['total']['sizebytes'];
}
/**
* Truncate on all nodes
* @deprecated
* @param string $database_table
* @return array
* @throws Exception\TransportException
*/
public function truncateTable($database_table, $timeOut = 2000)
{
$out = [];
list($db, $table) = explode('.', $database_table);
$nodes = $this->getMasterNodeForTable($database_table);
// scan the needed nodes
foreach ($nodes as $node)
{
$def = $this->client($node)->getTimeout();
$this->client($node)->database($db)->setTimeout($timeOut);
$out[$node] = $this->client($node)->truncateTable($table);
$this->client($node)->setTimeout($def);
}
return $out;
}
/**
* is_leader node
*
* @param string $database_table
* @return array
* @throws Exception\TransportException
*/
public function getMasterNodeForTable($database_table)
{
$list = $this->getTables(true);
if (empty($list[$database_table])) {
return [];
}
$result = [];
foreach ($list[$database_table] as $node=>$row)
{
if ($row['is_leader']) {
$result[] = $node;
}
}
return $result;
}
/**
* Find nodes by : db_name.table_name
*
* @param string $database_table
* @return array
* @throws Exception\TransportException
*/
public function getNodesByTable($database_table)
{
$list = $this->getTables();
if (empty($list[$database_table])) {
throw new QueryException('Not found: ' . $database_table);
}
return $list[$database_table];
}
/**
* Error string
*
* @return string|bool
*/
public function getError()
{
if (is_array($this->error)) {
return json_encode($this->error);
}
return $this->error;
}
}
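A brief sketch of how the Cluster helper above might be used (host, cluster name and IP prefix are placeholders; assumes the connection parameters resolve to one or more ClickHouse nodes):
// Illustrative sketch only.
$cl = new ClickHouseDB\Cluster(['host' => 'ch.example.local', 'port' => 8123, 'username' => 'default', 'password' => '']);
$cl->setScanTimeOut(2.5);                                 // per-node timeout used by rescan()
if (!$cl->isReplicasIsOk()) {
    throw new RuntimeException('Replication check failed: ' . $cl->getError());
}
print_r($cl->getClusterList());                           // cluster names found in system.clusters
print_r($cl->getClusterNodes('sharded_cluster'));         // node addresses of one cluster
$client = $cl->clientLike('sharded_cluster', '10.1.2.');  // Client bound to a node whose IP matches the prefix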

View File

@ -0,0 +1,9 @@
<?php
declare(strict_types=1);
namespace ClickHouseDB\Exception;
interface ClickHouseException
{
}

View File

@ -0,0 +1,9 @@
<?php
declare(strict_types=1);
namespace ClickHouseDB\Exception;
final class DatabaseException extends QueryException implements ClickHouseException
{
}

View File

@ -0,0 +1,15 @@
<?php
declare(strict_types=1);
namespace ClickHouseDB\Exception;
use LogicException;
class QueryException extends LogicException implements ClickHouseException
{
public static function cannotInsertEmptyValues() : self
{
return new self('Inserting empty values array is not supported in ClickHouse');
}
}

View File

@ -0,0 +1,11 @@
<?php
declare(strict_types=1);
namespace ClickHouseDB\Exception;
use LogicException;
final class TransportException extends LogicException implements ClickHouseException
{
}

View File

@ -0,0 +1,20 @@
<?php
declare(strict_types=1);
namespace ClickHouseDB\Exception;
use InvalidArgumentException;
use function gettype;
use function sprintf;
final class UnsupportedValueType extends InvalidArgumentException implements ClickHouseException
{
/**
* @param mixed $parameter
*/
public static function new($parameter) : self
{
return new self(sprintf('Parameter of type "%s" cannot be bound', gettype($parameter)));
}
}

View File

@ -0,0 +1,8 @@
<?php
namespace ClickHouseDB\Query;
interface Degeneration
{
public function process($sql);
public function bindParams(array $bindings);
}

View File

@ -0,0 +1,106 @@
<?php
declare(strict_types=1);
namespace ClickHouseDB\Query\Degeneration;
use ClickHouseDB\Query\Degeneration;
use ClickHouseDB\Quote\ValueFormatter;
use function array_map;
use function implode;
use function is_array;
class Bindings implements Degeneration
{
/**
* @var array
*/
protected $bindings = [];
/**
* @param array $bindings
*/
public function bindParams(array $bindings)
{
$this->bindings = [];
foreach ($bindings as $column => $value) {
$this->bindParam($column, $value);
}
}
/**
* @param string $column
* @param mixed $value
*/
public function bindParam($column, $value)
{
$this->bindings[$column] = $value;
}
/**
 * Replaces placeholders in the SQL string with the corresponding bound values,
 * using the supplied placeholder pattern.
 *
 * @param string $sql
 * @param array $binds
 * @param string $pattern
 * @return string
 */
public function compile_binds($sql, $binds, $pattern)
{
return preg_replace_callback($pattern, function($m) use ($binds){
if(isset($binds[$m[1]])){ // If it exists in our array
return $binds[$m[1]]; // Then replace it from our array
}
return $m[0]; // Otherwise return the whole match (basically we won't change it)
}, $sql);
}
/**
* Compile Bindings
*
* @param string $sql
* @return mixed
*/
public function process($sql)
{
$bindFormatted=[];
$bindRaw=[];
foreach ($this->bindings as $key => $value) {
if (is_array($value)) {
$valueSet = implode(', ', $value);
$values = array_map(
function ($value) {
return ValueFormatter::formatValue($value);
},
$value
);
$formattedParameter = implode(',', $values);
} else {
$valueSet = $value;
$formattedParameter = ValueFormatter::formatValue($value);
}
if ($formattedParameter !== null) {
$bindFormatted[$key]=$formattedParameter;
}
if ($valueSet !== null) {
$bindRaw[$key]=$valueSet;
}
}
for ($loop=0;$loop<2;$loop++)
{
// two passes to resolve nested binds,
// e.g. ['A' => '{B}', 'B' => ':C', 'C' => 123]
$sql=$this->compile_binds($sql,$bindRaw,'#{([\w+]+)}#');
}
$sql=$this->compile_binds($sql,$bindFormatted,'#:([\w+]+)#');
return $sql;
}
}
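A short sketch of how the Bindings degeneration rewrites placeholders when used on its own (query and values are arbitrary):
// Illustrative sketch only.
$bindings = new ClickHouseDB\Query\Degeneration\Bindings();
$bindings->bindParams([
    'limit' => 5,
    'from'  => new DateTime('2018-10-01'),   // formatted via ValueFormatter as 'Y-m-d H:i:s'
    'ids'   => [1, 2, 3],                    // arrays become a comma-separated list
]);
echo $bindings->process('SELECT * FROM events WHERE id IN (:ids) AND dt > :from LIMIT {limit}');
// SELECT * FROM events WHERE id IN (1,2,3) AND dt > '2018-10-01 00:00:00' LIMIT 5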

View File

@ -0,0 +1,97 @@
<?php
namespace ClickHouseDB\Query\Degeneration;
use ClickHouseDB\Query\Degeneration;
class Conditions implements Degeneration
{
/**
* @var array
*/
protected $bindings = [];
/**
* @param array $bindings
*/
public function bindParams(array $bindings)
{
foreach ($bindings as $column => $value) {
$this->bindings[$column] = $value;
}
}
static function __ifsets($matches, $markers, $else = false)
{
$content_false = '';
if ($else)
{
list($condition, $preset, $variable, $content_true, $content_false) = $matches;
} else
{
list($condition, $preset, $variable, $content_true) = $matches;
}
$preset = strtolower($preset);
if ($preset == 'set')
{
return (isset($markers[$variable]) && !empty($markers[$variable])) ? $content_true : $content_false;
}
if ($preset == 'bool')
{
return (isset($markers[$variable]) && is_bool($markers[$variable]) && $markers[$variable] == true)
? $content_true
: $content_false;
}
if ($preset == 'string')
{
return (isset($markers[$variable]) && is_string($markers[$variable]) && strlen($markers[$variable]))
? $content_true
: $content_false;
}
if ($preset == 'int')
{
return (isset($markers[$variable]) && intval($markers[$variable]) <> 0)
? $content_true
: $content_false;
}
return '';
}
/**
* @param string $sql
* @return mixed
*/
public function process($sql)
{
$markers = $this->bindings;
// 2. process if/else conditions
$sql = preg_replace_callback('#\{if\s(.+?)}(.+?)\{else}(.+?)\{/if}#sui', function($matches) use ($markers) {
list($condition, $variable, $content_true, $content_false) = $matches;
return (isset($markers[$variable]) && ($markers[$variable] || is_numeric($markers[$variable])))
? $content_true
: $content_false;
}, $sql);
// 3. process if conditions
$sql = preg_replace_callback('#\{if\s(.+?)}(.+?)\{/if}#sui', function($matches) use ($markers) {
list($condition, $variable, $content) = $matches;
if (isset($markers[$variable]) && ($markers[$variable] || is_numeric($markers[$variable]))) {
return $content;
}
}, $sql);
// 1. process if[set|int]/else conditions
$sql = preg_replace_callback('#\{if(.{1,}?)\s(.+?)}(.+?)\{else}(.+?)\{/if}#sui', function($matches) use ($markers) {return self::__ifsets($matches, $markers, true); }, $sql);
$sql = preg_replace_callback('#\{if(.{1,}?)\s(.+?)}(.+?)\{/if}#sui', function($matches) use ($markers) { return self::__ifsets($matches, $markers, false); }, $sql);
return $sql;
}
}
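A short sketch of the {if marker} ... {/if} handling in the Conditions degeneration (query and marker names are arbitrary):
// Illustrative sketch only.
$cond = new ClickHouseDB\Query\Degeneration\Conditions();
$cond->bindParams(['only_site' => 34]);
echo $cond->process('SELECT count() FROM events {if only_site} WHERE site_id = 34 {/if}');
// -> SELECT count() FROM events  WHERE site_id = 34
$cond->bindParams(['only_site' => false]);
echo $cond->process('SELECT count() FROM events {if only_site} WHERE site_id = 34 {/if}');
// -> SELECT count() FROM events   (the conditional fragment is dropped)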

View File

@ -0,0 +1,114 @@
<?php
namespace ClickHouseDB\Query;
use ClickHouseDB\Exception\QueryException;
use function sizeof;
class Query
{
/**
* @var string
*/
protected $sql;
/**
* @var string|null
*/
protected $format = null;
/**
* @var array
*/
private $degenerations = [];
/**
* Query constructor.
* @param string $sql
* @param array $degenerations
*/
public function __construct($sql, $degenerations = [])
{
if (!trim($sql))
{
throw new QueryException('Empty Query');
}
$this->sql = $sql;
$this->degenerations = $degenerations;
}
/**
* @param string|null $format
*/
public function setFormat($format)
{
$this->format = $format;
}
private function applyFormatQuery()
{
// FORMAT\s(\w)*$
if (null === $this->format) {
return false;
}
$supportFormats =
"FORMAT\\s+TSV|FORMAT\\s+TSVRaw|FORMAT\\s+TSVWithNames|FORMAT\\s+TSVWithNamesAndTypes|FORMAT\\s+Vertical|FORMAT\\s+JSONCompact|FORMAT\\s+JSONEachRow|FORMAT\\s+TSKV|FORMAT\\s+TabSeparatedWithNames|FORMAT\\s+TabSeparatedWithNamesAndTypes|FORMAT\\s+TabSeparatedRaw|FORMAT\\s+BlockTabSeparated|FORMAT\\s+CSVWithNames|FORMAT\\s+CSV|FORMAT\\s+JSON|FORMAT\\s+TabSeparated";
$matches = [];
if (preg_match_all('%(' . $supportFormats . ')%ius', $this->sql, $matches)) {
// skip add "format json"
if (isset($matches[0]))
{
$this->format = trim(str_ireplace('format', '', $matches[0][0]));
}
} else {
$this->sql = $this->sql . ' FORMAT ' . $this->format;
}
}
/**
* @return null|string
*/
public function getFormat()
{
return $this->format;
}
public function toSql()
{
if ($this->format !== null) {
$this->applyFormatQuery();
}
if (sizeof($this->degenerations))
{
foreach ($this->degenerations as $degeneration)
{
if ($degeneration instanceof Degeneration) {
$this->sql = $degeneration->process($this->sql);
}
}
}
return $this->sql;
}
/**
* @return string
*/
public function __toString()
{
return $this->toSql();
}
}
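A sketch of how Query, the degenerations and the FORMAT handling combine (query text is arbitrary):
// Illustrative sketch only.
$bindings = new ClickHouseDB\Query\Degeneration\Bindings();
$bindings->bindParams(['limit' => 10]);
$query = new ClickHouseDB\Query\Query('SELECT * FROM system.numbers LIMIT {limit}', [$bindings]);
$query->setFormat('JSON');
echo $query->toSql();   // SELECT * FROM system.numbers LIMIT 10 FORMAT JSON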

View File

@ -0,0 +1,98 @@
<?php
namespace ClickHouseDB\Query;
use ClickHouseDB\Exception\QueryException;
class WhereInFile
{
/**
*
*/
const FORMAT_TabSeparated = 'TabSeparated';
const FORMAT_TabSeparatedWithNames = 'TabSeparatedWithNames';
const FORMAT_CSV = 'CSV';
/**
* @var array
*/
private $_files = [];
/**
* WhereInFile constructor.
*/
public function __construct() {}
/**
 * @param string $file_name
 * @param string $table_name
 * @param array $structure column name => ClickHouse type
 * @param string $format
 */
public function attachFile($file_name, $table_name, $structure, $format = 'CSV')
{
if (!is_readable($file_name)) {
throw new QueryException('Can\'t read file: ' . $file_name);
}
$this->_files[$table_name] = [
'filename' => $file_name,
'structure' => $structure,
'format' => $format
];
}
/**
* @return int
*/
public function size()
{
return sizeof($this->_files);
}
/**
* @return array
*/
public function fetchFiles()
{
$out = [];
foreach ($this->_files as $table => $data) {
$out[$table] = realpath($data['filename']);
}
return $out;
}
/**
* @param string $table
* @return string
*/
public function fetchStructure($table)
{
$structure = $this->_files[$table]['structure'];
$out = [];
foreach ($structure as $name => $type) {
$out[] = $name . ' ' . $type;
}
return implode(',', $out);
}
/**
* @return array
*/
public function fetchUrlParams()
{
$out = [];
foreach ($this->_files as $table => $data) {
$out[$table . '_structure'] = $this->fetchStructure($table);
$out[$table . '_format'] = $data['format'];
}
return $out;
}
}
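A sketch of attaching a local file as an external temporary table with WhereInFile (path, table name and structure are placeholders); the object is normally passed along with a SELECT so the transport adds the file, its structure and format to the request:
// Illustrative sketch only; the CSV path is a placeholder.
$whereIn = new ClickHouseDB\Query\WhereInFile();
$whereIn->attachFile('/tmp/visitor_ids.csv', 'namex', ['site_id' => 'Int32', 'visitor_id' => 'String'], ClickHouseDB\Query\WhereInFile::FORMAT_CSV);
print_r($whereIn->fetchFiles());      // ['namex' => '/tmp/visitor_ids.csv']
print_r($whereIn->fetchUrlParams());  // ['namex_structure' => 'site_id Int32,visitor_id String', 'namex_format' => 'CSV']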

View File

@ -0,0 +1,119 @@
<?php
namespace ClickHouseDB\Query;
use ClickHouseDB\Exception\QueryException;
class WriteToFile
{
/**
*
*/
const FORMAT_TabSeparated = 'TabSeparated';
const FORMAT_TabSeparatedWithNames = 'TabSeparatedWithNames';
const FORMAT_CSV = 'CSV';
private $support_format = ['TabSeparated', 'TabSeparatedWithNames', 'CSV'];
/**
* @var string
*/
private $file_name = null;
/**
* @var string
*/
private $format = 'CSV';
/**
* @var bool
*/
private $gzip = false;
/**
* WriteToFile constructor.
* @param string $file_name
* @param bool $overwrite
* @param string|null $format
*/
public function __construct($file_name, $overwrite = true, $format = null) {
if (!$file_name)
{
throw new QueryException('Bad file path');
}
if (is_file($file_name))
{
if (!$overwrite)
{
throw new QueryException('File exists: ' . $file_name);
}
if (!unlink($file_name))
{
throw new QueryException('Can\'t delete: ' . $file_name);
}
}
$dir = dirname($file_name);
if (!is_writable($dir))
{
throw new QueryException('Directory is not writable: ' . $dir);
}
if (is_string($format))
{
$this->setFormat($format);
}
$this->file_name = $file_name;
}
/**
* @return bool
*/
public function getGzip()
{
return $this->gzip;
}
/**
* @param bool $flag
*/
public function setGzip($flag)
{
$this->gzip = $flag;
}
/**
* @param string $format
*/
public function setFormat($format)
{
if (!in_array($format, $this->support_format))
{
throw new QueryException('Unsupported format: ' . $format);
}
$this->format = $format;
}
/**
* @return int
*/
public function size()
{
return filesize($this->file_name);
}
/**
* @return string
*/
public function fetchFile()
{
return $this->file_name;
}
/**
* @return string
*/
public function fetchFormat()
{
return $this->format;
}
}
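A sketch of WriteToFile, which describes a local target file for a SELECT result (path is a placeholder); the client's transport writes the response into the file instead of returning it in memory:
// Illustrative sketch only; the export path is a placeholder.
$out = new ClickHouseDB\Query\WriteToFile('/tmp/export.csv', true, ClickHouseDB\Query\WriteToFile::FORMAT_CSV);
$out->setGzip(true);       // compress while writing
echo $out->fetchFormat();  // CSV
echo $out->fetchFile();    // /tmp/export.csv
// After the query has been executed through the client, size() reports the size of the written file.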

View File

@ -0,0 +1,14 @@
<?php
namespace ClickHouseDB\Quote;
/**
* @deprecated Left for compatibility
*/
class CSV
{
public static function quoteRow($row)
{
return FormatLine::CSV($row);
}
}

View File

@ -0,0 +1,60 @@
<?php
namespace ClickHouseDB\Quote;
class FormatLine
{
/**
*
* @var array
*/
private static $strict = [];
/**
* Format
*
* @param string $format
* @return StrictQuoteLine
*/
public static function strictQuote($format)
{
if (empty(self::$strict[$format]))
{
self::$strict[$format] = new StrictQuoteLine($format);
}
return self::$strict[$format];
}
/**
* Array in a string for a query Insert
*
* @param mixed[] $row
* @return string
*/
public static function Insert(array $row)
{
return self::strictQuote('Insert')->quoteRow($row);
}
/**
* Array to TSV
*
* @param array $row
* @return string
*/
public static function TSV(array $row)
{
return self::strictQuote('TSV')->quoteRow($row);
}
/**
* Array to CSV
*
* @param array $row
* @return string
*/
public static function CSV(array $row)
{
return self::strictQuote('CSV')->quoteRow($row);
}
}
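A sketch of the row-formatting helpers above (values are arbitrary):
// Illustrative sketch only.
echo ClickHouseDB\Quote\FormatLine::CSV(['2018-10-04', 34, "O'Reilly", null]);
// "2018-10-04",34,"O'Reilly",\N
echo ClickHouseDB\Quote\FormatLine::Insert(['2018-10-04', 34, 'text']);
// '2018-10-04',34,'text'
echo ClickHouseDB\Quote\FormatLine::TSV(['2018-10-04', 34, "tab\there"]);
// values joined with tab characters; embedded tabs and newlines are escaped as \t and \n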

View File

@ -0,0 +1,127 @@
<?php
namespace ClickHouseDB\Quote;
use ClickHouseDB\Exception\QueryException;
use ClickHouseDB\Type\NumericType;
use function array_map;
use function is_array;
use function is_float;
use function is_int;
use function is_string;
use function preg_replace;
use function str_replace;
class StrictQuoteLine
{
private $preset = [
'CSV'=>[
'EnclosureArray'=>'"',
'EncodeEnclosure'=>'"',
'Enclosure'=>'"',
'Null'=>"\\N",
'Delimiter'=>",",
'TabEncode'=>false,
],
'Insert'=>[
'EnclosureArray'=>'',
'EncodeEnclosure'=>'\\',
'Enclosure'=>'\'',
'Null'=>"NULL",
'Delimiter'=>",",
'TabEncode'=>false,
],
'TSV'=>[
'EnclosureArray'=>'',
'EncodeEnclosure'=>'',
'Enclosure'=>'\\',
'Null'=>" ",
'Delimiter'=>"\t",
'TabEncode'=>true,
],
];
private $settings = [];
public function __construct($format)
{
if (empty($this->preset[$format]))
{
throw new QueryException("Unsupport format encode line:" . $format);
}
$this->settings = $this->preset[$format];
}
public function quoteRow($row)
{
return implode($this->settings['Delimiter'], $this->quoteValue($row));
}
public function quoteValue($row)
{
$enclosure = $this->settings['Enclosure'];
$delimiter = $this->settings['Delimiter'];
$encode = $this->settings['EncodeEnclosure'];
$encodeArray = $this->settings['EnclosureArray'];
$null = $this->settings['Null'];
$tabEncode = $this->settings['TabEncode'];
$quote = function($value) use ($enclosure, $delimiter, $encode, $encodeArray, $null, $tabEncode) {
$delimiter_esc = preg_quote($delimiter, '/');
$enclosure_esc = preg_quote($enclosure, '/');
$encode_esc = preg_quote($encode, '/');
$encode = true;
if ($value instanceof NumericType) {
$encode = false;
}
if (is_array($value)) {
// Arrays are formatted as a comma-separated list of values in square brackets.
// Numeric elements are formatted as usual; dates, date-times and strings are put in
// single quotes with the same escaping rules as in the TabSeparated format, and the
// resulting string is then emitted as the row value.
$value = array_map(
function ($v) use ($enclosure_esc, $encode_esc) {
return is_string($v) ? $this->encodeString($v, $enclosure_esc, $encode_esc) : $v;
},
$value
);
$resultArray = FormatLine::Insert($value);
return $encodeArray . '[' . $resultArray . ']' . $encodeArray;
}
$value = ValueFormatter::formatValue($value, false);
if (is_float($value) || is_int($value)) {
return (string) $value;
}
if (is_string($value) && $encode) {
if ($tabEncode) {
return str_replace(["\t", "\n"], ['\\t', '\\n'], $value);
}
$value = $this->encodeString($value, $enclosure_esc, $encode_esc);
return $enclosure . $value . $enclosure;
}
if ($value === null) {
return $null;
}
return $value;
};
return array_map($quote, $row);
}
/**
* @return string
*/
public function encodeString(string $value, string $enclosureEsc, string $encodeEsc)
{
return preg_replace('/(' . $enclosureEsc . '|' . $encodeEsc . ')/', $encodeEsc . '\1', $value);
}
}

View File

@ -0,0 +1,72 @@
<?php
declare(strict_types=1);
namespace ClickHouseDB\Quote;
use ClickHouseDB\Exception\UnsupportedValueType;
use ClickHouseDB\Type\Type;
use DateTimeInterface;
use function addslashes;
use function is_bool;
use function is_callable;
use function is_float;
use function is_int;
use function is_object;
use function is_string;
use function sprintf;
class ValueFormatter
{
/**
* @param mixed $value
* @return mixed
*/
public static function formatValue($value, bool $addQuotes = true)
{
if ($value instanceof DateTimeInterface) {
$value = $value->format('Y-m-d H:i:s');
}
if (is_float($value) || is_int($value) || is_bool($value) || $value === null) {
return $value;
}
if ($value instanceof Type) {
return $value->getValue();
}
if (is_object($value) && is_callable([$value, '__toString'])) {
$value = (string) $value;
}
if (is_string($value)) {
if ($addQuotes) {
return self::formatStringParameter(self::escapeString($value));
}
return $value;
}
throw UnsupportedValueType::new($value);
}
/**
* Escape a string
*
* @param string $value
* @return string
*/
private static function escapeString($value)
{
return addslashes($value);
}
/**
* @return string
*/
private static function formatStringParameter($value)
{
return sprintf("'%s'", $value);
}
}
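A sketch of ValueFormatter behaviour for the main value types:
// Illustrative sketch only.
use ClickHouseDB\Quote\ValueFormatter;
ValueFormatter::formatValue("O'Reilly");                  // 'O\'Reilly'  (quoted, apostrophe escaped via addslashes)
ValueFormatter::formatValue(42);                          // 42           (numbers and bools pass through unquoted)
ValueFormatter::formatValue(new DateTime('2018-10-04'));  // '2018-10-04 00:00:00'
ValueFormatter::formatValue('raw', false);                // raw          (no quoting when $addQuotes is false)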

View File

@ -0,0 +1,232 @@
<?php
namespace ClickHouseDB;
use ClickHouseDB\Transport\Http;
class Settings
{
/**
* @var Http
*/
private $client = null;
/**
* @var array
*/
private $settings = [];
private $_ReadOnlyUser = false;
/**
* @var bool
*/
private $_isHttps = false;
/**
* Settings constructor.
* @param Http $client
*/
public function __construct(Http $client)
{
$default = [
'extremes' => false,
'readonly' => true,
'max_execution_time' => 20,
'enable_http_compression' => 0,
'https' => false
];
$this->settings = $default;
$this->client = $client;
}
/**
* @param string|int $key
* @return mixed
*/
public function get($key)
{
if (!$this->is($key)) {
return null;
}
return $this->settings[$key];
}
/**
* @param string|int $key
* @return bool
*/
public function is($key)
{
return isset($this->settings[$key]);
}
/**
* @param string|int $key
* @param mixed $value
* @return $this
*/
public function set($key, $value)
{
$this->settings[$key] = $value;
return $this;
}
/**
* @return mixed
*/
public function getDatabase()
{
return $this->get('database');
}
/**
* @param string $db
* @return $this
*/
public function database($db)
{
$this->set('database', $db);
return $this;
}
/**
* @return mixed
*/
public function getTimeOut()
{
return $this->get('max_execution_time');
}
/**
* @return mixed|null
*/
public function isEnableHttpCompression()
{
return $this->getSetting('enable_http_compression');
}
/**
* @param bool|int $flag
* @return $this
*/
public function enableHttpCompression($flag)
{
$this->set('enable_http_compression', intval($flag));
return $this;
}
public function https($flag = true)
{
$this->set('https', $flag);
return $this;
}
public function isHttps()
{
return $this->get('https');
}
/**
* @param int|bool $flag
* @return $this
*/
public function readonly($flag)
{
$this->set('readonly', $flag);
return $this;
}
/**
* @param string $session_id
* @return $this
*/
public function session_id($session_id)
{
$this->set('session_id', $session_id);
return $this;
}
/**
* @return mixed|bool
*/
public function getSessionId()
{
if (empty($this->settings['session_id'])) {
return false;
}
return $this->get('session_id');
}
/**
* @return string|bool
*/
public function makeSessionId()
{
$this->session_id(sha1(uniqid('', true)));
return $this->getSessionId();
}
/**
* @param int $time
* @return $this
*/
public function max_execution_time($time)
{
$this->set('max_execution_time', $time);
return $this;
}
/**
* @return array
*/
public function getSettings()
{
return $this->settings;
}
/**
* @param array $settings_array
* @return $this
*/
public function apply($settings_array)
{
foreach ($settings_array as $key => $value) {
$this->set($key, $value);
}
return $this;
}
/**
* @param int|bool $flag
*/
public function setReadOnlyUser($flag)
{
$this->_ReadOnlyUser = $flag;
}
/**
* @return bool
*/
public function isReadOnlyUser()
{
return $this->_ReadOnlyUser;
}
/**
* @param string $name
* @return mixed|null
*/
public function getSetting($name)
{
if (!isset($this->settings[$name])) {
return null;
}
return $this->get($name);
}
}
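A sketch of adjusting per-query settings through the client; the settings() accessor is the one already used above (for example in Client::databaseSize()), and $db stands for a configured ClickHouseDB\Client:
// Illustrative sketch only.
$db->settings()->max_execution_time(60);        // raise the per-query time limit
$db->settings()->set('max_block_size', 65536);  // any ClickHouse setting can be set by name
$db->settings()->readonly(false);
$session = $db->settings()->makeSessionId();    // enables session-scoped statements on the server
echo $db->settings()->getDatabase();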

View File

@ -0,0 +1,534 @@
<?php
namespace ClickHouseDB;
use ClickHouseDB\Exception\DatabaseException;
use ClickHouseDB\Exception\QueryException;
use ClickHouseDB\Query\Query;
use ClickHouseDB\Transport\CurlerRequest;
use ClickHouseDB\Transport\CurlerResponse;
class Statement
{
/**
* @var string|mixed
*/
private $_rawData;
/**
* @var int
*/
private $_http_code = -1;
/**
* @var CurlerRequest
*/
private $_request = null;
/**
* @var bool
*/
private $_init = false;
/**
* @var Query
*/
private $query;
/**
* @var mixed
*/
private $format;
/**
* @var string
*/
private $sql = '';
/**
* @var array
*/
private $meta;
/**
* @var array
*/
private $totals;
/**
* @var array
*/
private $extremes;
/**
* @var int
*/
private $rows;
/**
* @var bool|integer
*/
private $rows_before_limit_at_least = false;
/**
* @var array
*/
private $array_data = [];
/**
* @var array|null
*/
private $statistics = null;
public function __construct(CurlerRequest $request)
{
$this->_request = $request;
$this->format = $this->_request->getRequestExtendedInfo('format');
$this->query = $this->_request->getRequestExtendedInfo('query');
$this->sql = $this->_request->getRequestExtendedInfo('sql');
}
/**
* @return CurlerRequest
*/
public function getRequest()
{
return $this->_request;
}
/**
* @return CurlerResponse
* @throws Exception\TransportException
*/
private function response()
{
return $this->_request->response();
}
/**
* @return mixed
* @throws Exception\TransportException
*/
public function responseInfo()
{
return $this->response()->info();
}
/**
* @return mixed|string
*/
public function sql()
{
return $this->sql;
}
/**
* @param string $body
* @return array|bool
*/
private function parseErrorClickHouse($body)
{
$body = trim($body);
$matches = [];
// Code: 115, e.displayText() = DB::Exception: Unknown setting readonly[0], e.what() = DB::Exception
// Code: 192, e.displayText() = DB::Exception: Unknown user x, e.what() = DB::Exception
// Code: 60, e.displayText() = DB::Exception: Table default.ZZZZZ doesn't exist., e.what() = DB::Exception
if (preg_match("%Code: (\d+),\se\.displayText\(\) \=\s*DB\:\:Exception\s*:\s*(.*)\,\s*e\.what.*%ius", $body, $matches)) {
return ['code' => $matches[1], 'message' => $matches[2]];
}
return false;
}
/**
* @return bool
* @throws Exception\TransportException
*/
public function error()
{
if (!$this->isError()) {
return false;
}
$body = $this->response()->body();
$error_no = $this->response()->error_no();
$error = $this->response()->error();
if (!$error_no && !$error) {
$parse = $this->parseErrorClickHouse($body);
if ($parse) {
throw new DatabaseException($parse['message'] . "\nIN:" . $this->sql(), $parse['code']);
} else {
$code = $this->response()->http_code();
$message = "HttpCode:" . $this->response()->http_code() . " ; " . $this->response()->error() . " ;" . $body;
}
} else {
$code = $error_no;
$message = $this->response()->error();
}
throw new QueryException($message, $code);
}
/**
* @return bool
* @throws Exception\TransportException
*/
public function isError()
{
return ($this->response()->http_code() !== 200 || $this->response()->error_no());
}
/**
* @return bool
* @throws Exception\TransportException
*/
private function check()
{
if (!$this->_request->isResponseExists()) {
throw new QueryException('No response available');
}
if ($this->isError()) {
$this->error();
}
return true;
}
/**
* @return bool
* @throws Exception\TransportException
*/
private function init()
{
if ($this->_init) {
return false;
}
$this->check();
$this->_rawData = $this->response()->rawDataOrJson($this->format);
if (!$this->_rawData) {
$this->_init = true;
return false;
}
$data=[];
foreach (['meta', 'data', 'totals', 'extremes', 'rows', 'rows_before_limit_at_least', 'statistics'] as $key) {
if (isset($this->_rawData[$key])) {
if ($key=='data')
{
$data=$this->_rawData[$key];
}
else{
$this->{$key} = $this->_rawData[$key];
}
}
}
if (empty($this->meta)) {
throw new QueryException('Can\'t find meta');
}
$isJSONCompact = (stripos($this->format, 'JSONCompact') !== false);
$this->array_data = [];
foreach ($data as $rows) {
$r = [];
if ($isJSONCompact)
{
$r[]=$rows;
}
else {
foreach ($this->meta as $meta) {
$r[$meta['name']] = $rows[$meta['name']];
}
}
$this->array_data[] = $r;
}
return true;
}
/**
* @return array
* @throws \Exception
*/
public function extremes()
{
$this->init();
return $this->extremes;
}
/**
* @return mixed
* @throws Exception\TransportException
*/
public function totalTimeRequest()
{
$this->check();
return $this->response()->total_time();
}
/**
* @return array
* @throws \Exception
*/
public function extremesMin()
{
$this->init();
if (empty($this->extremes['min'])) {
return [];
}
return $this->extremes['min'];
}
/**
* @return array
* @throws \Exception
*/
public function extremesMax()
{
$this->init();
if (empty($this->extremes['max'])) {
return [];
}
return $this->extremes['max'];
}
/**
* @return array
* @throws Exception\TransportException
*/
public function totals()
{
$this->init();
return $this->totals;
}
/**
*
*/
public function dumpRaw()
{
print_r($this->_rawData);
}
/**
*
*/
public function dump()
{
$this->_request->dump();
$this->response()->dump();
}
/**
* @return bool|int
* @throws Exception\TransportException
*/
public function countAll()
{
$this->init();
return $this->rows_before_limit_at_least;
}
/**
* @param bool $key
* @return array|mixed|null
* @throws Exception\TransportException
*/
public function statistics($key = false)
{
$this->init();
if ($key)
{
if (!is_array($this->statistics)) {
return null;
}
if (!isset($this->statistics[$key])) {
return null;
}
return $this->statistics[$key];
}
return $this->statistics;
}
/**
* @return int
* @throws Exception\TransportException
*/
public function count()
{
$this->init();
return $this->rows;
}
/**
* @return mixed|string
* @throws Exception\TransportException
*/
public function rawData()
{
if ($this->_init) {
return $this->_rawData;
}
$this->check();
return $this->response()->rawDataOrJson($this->format);
}
/**
* @param string $key
* @return mixed|null
* @throws Exception\TransportException
*/
public function fetchOne($key = '')
{
$this->init();
if (isset($this->array_data[0])) {
if ($key) {
if (isset($this->array_data[0][$key])) {
return $this->array_data[0][$key];
} else {
return null;
}
}
return $this->array_data[0];
}
return null;
}
/**
* @param string|null $path
* @return array
* @throws Exception\TransportException
*/
public function rowsAsTree($path)
{
$this->init();
$out = [];
foreach ($this->array_data as $row) {
$d = $this->array_to_tree($row, $path);
$out = array_replace_recursive($d, $out);
}
return $out;
}
/**
* Return size_upload,upload_content,speed_upload,time_request
*
* @return array
* @throws Exception\TransportException
*/
public function info_upload()
{
$this->check();
return [
'size_upload' => $this->response()->size_upload(),
'upload_content' => $this->response()->upload_content_length(),
'speed_upload' => $this->response()->speed_upload(),
'time_request' => $this->response()->total_time()
];
}
/**
* Return size_upload,upload_content,speed_upload,time_request,starttransfer_time,size_download,speed_download
*
* @return array
* @throws Exception\TransportException
*/
public function info()
{
$this->check();
return [
'starttransfer_time' => $this->response()->starttransfer_time(),
'size_download' => $this->response()->size_download(),
'speed_download' => $this->response()->speed_download(),
'size_upload' => $this->response()->size_upload(),
'upload_content' => $this->response()->upload_content_length(),
'speed_upload' => $this->response()->speed_upload(),
'time_request' => $this->response()->total_time()
];
}
/**
* get format in sql
* @return mixed
*/
public function getFormat()
{
return $this->format;
}
/**
* @return array
* @throws Exception\TransportException
*/
public function rows()
{
$this->init();
return $this->array_data;
}
/**
* @param array|string $arr
* @param null|string|array $path
* @return array
*/
private function array_to_tree($arr, $path = null)
{
if (is_array($path)) {
$keys = $path;
} else {
$args = func_get_args();
array_shift($args);
if (sizeof($args) < 2) {
$separator = '.';
$keys = explode($separator, $path);
} else {
$keys = $args;
}
}
//
$tree = $arr;
while (count($keys)) {
$key = array_pop($keys);
if (isset($arr[$key])) {
$val = $arr[$key];
} else {
$val = $key;
}
$tree = array($val => $tree);
}
if (!is_array($tree)) {
return [];
}
return $tree;
}
}
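A sketch of reading results through the Statement wrapper returned by the client's select() (table and column names are placeholders; $db stands for a configured ClickHouseDB\Client):
// Illustrative sketch only.
$st = $db->select('SELECT site_id, sum(views) AS views FROM events GROUP BY site_id LIMIT 5');
echo $st->count();                    // number of returned rows
echo $st->fetchOne('views');          // a single value from the first row
print_r($st->rows());                 // all rows as associative arrays
print_r($st->rowsAsTree('site_id'));  // rows re-keyed by the site_id column
print_r($st->statistics());           // query statistics reported by ClickHouse (elapsed, rows_read, ...)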

View File

@ -0,0 +1,732 @@
<?php
namespace ClickHouseDB\Transport;
class CurlerRequest
{
/**
* @var array
*/
public $extendinfo = array();
/**
* @var string|array
*/
private $parameters = '';
/**
* @var array
*/
private $options;
/**
* @var array
*/
private $headers; // Request headers to send with the request.
/**
* @var string
*/
private $url;
/**
* @var string
*/
private $method;
/**
* @var bool
*/
private $id;
/**
* @var resource|null
*/
private $handle;
/**
* @var CurlerResponse
*/
private $resp = null;
/**
* @var bool
*/
private $_persistent = false;
/**
* @var bool
*/
private $_attachFiles = false;
/**
* @var string
*/
private $callback_class = '';
/**
* @var string
*/
private $callback_functionName = '';
/**
* @var bool
*/
private $_httpCompression = false;
/**
* @var callable
*/
private $callback_function = null;
/**
* @var bool|resource
*/
private $infile_handle = false;
/**
* @var int
*/
private $_dns_cache = 120;
/**
* @var resource
*/
private $resultFileHandle = null;
/**
* @param bool $id
*/
public function __construct($id = false)
{
$this->id = $id;
$this->header('Cache-Control', 'no-cache, no-store, must-revalidate');
$this->header('Expires', '0');
$this->header('Pragma', 'no-cache');
$this->options = array(
CURLOPT_SSL_VERIFYHOST => 0,
CURLOPT_SSL_VERIFYPEER => false,
CURLOPT_TIMEOUT => 10,
CURLOPT_CONNECTTIMEOUT => 5, // number of seconds to wait while trying to connect
CURLOPT_HTTP_VERSION => CURL_HTTP_VERSION_1_1,
CURLOPT_MAXREDIRS => 10,
CURLOPT_HEADER => TRUE,
CURLOPT_FOLLOWLOCATION => TRUE,
CURLOPT_AUTOREFERER => 1, // on redirect, set the "Referer:" header from the "Location:" value
CURLOPT_BINARYTRANSFER => 1, // binary-safe transfer
CURLOPT_RETURNTRANSFER => TRUE,
CURLOPT_USERAGENT => 'smi2/PHPClickHouse/client',
);
}
/**
*
*/
public function __destruct()
{
$this->close();
}
public function close()
{
if ($this->handle)
{
curl_close($this->handle);
}
$this->handle = null;
}
/**
* @param array $attachFiles
*/
public function attachFiles($attachFiles)
{
$this->header("Content-Type", "multipart/form-data");
$out = [];
foreach ($attachFiles as $post_name => $file_path) {
$out[$post_name] = new \CURLFile($file_path);
}
$this->_attachFiles = true;
$this->parameters($out);
}
/**
* @param bool $set
* @return $this
*/
public function id($set = false)
{
if ($set) {
$this->id = $set;
}
return $this;
}
/**
* @param array $params
* @return $this
*/
public function setRequestExtendedInfo($params)
{
$this->extendinfo = $params;
return $this;
}
/**
* @param string|integer|null $key
* @return mixed
*/
public function getRequestExtendedInfo($key = null)
{
if ($key) {
return isset($this->extendinfo[$key]) ? $this->extendinfo[$key] : false;
}
return $this->extendinfo;
}
/**
* @return bool|resource
*/
public function getInfileHandle()
{
return $this->infile_handle;
}
/**
* @param string $file_name
* @return bool|resource
*/
public function setInfile($file_name)
{
$this->header('Expect', '');
$this->infile_handle = fopen($file_name, 'r');
if (is_resource($this->infile_handle))
{
if ($this->_httpCompression) {
$this->header('Content-Encoding', 'gzip');
$this->header('Content-Type', 'application/x-www-form-urlencoded');
stream_filter_append($this->infile_handle, 'zlib.deflate', STREAM_FILTER_READ, ["window" => 30]);
$this->options[CURLOPT_SAFE_UPLOAD] = 1;
} else {
$this->options[CURLOPT_INFILESIZE] = filesize($file_name);
}
$this->options[CURLOPT_INFILE] = $this->infile_handle;
}
return $this->infile_handle;
}
/**
* @param callable $callback
*/
public function setCallbackFunction($callback)
{
$this->callback_function = $callback;
}
/**
* @param callable $callback
*/
public function setWriteFunction($callback)
{
$this->options[CURLOPT_WRITEFUNCTION]=$callback;
}
/**
* @param callable $callback
*/
public function setReadFunction($callback)
{
$this->options[CURLOPT_READFUNCTION] = $callback;
}
public function setHeaderFunction($callback)
{
$this->options[CURLOPT_HEADERFUNCTION] = $callback;
}
/**
* @param string $classCallBack
* @param string $functionName
*/
public function setCallback($classCallBack, $functionName)
{
$this->callback_class = $classCallBack;
$this->callback_functionName = $functionName;
}
/**
*
*/
public function onCallback()
{
if ($this->callback_function) {
$x = $this->callback_function;
$x($this);
}
if ($this->callback_class && $this->callback_functionName) {
$c = $this->callback_functionName;
$this->callback_class->$c($this);
}
}
/**
* @param bool $result
* @return string
*/
public function dump($result = false)
{
$message = "\n------------ Request ------------\n";
$message .= 'URL:' . $this->url . "\n\n";
$message .= 'METHOD:' . $this->method . "\n\n";
$message .= 'PARAMS:' . print_r($this->parameters, true) . "\n";
$message .= 'HEADERS:' . print_r($this->headers, true) . "\n";
$message .= "-----------------------------------\n";
if ($result) {
return $message;
}
echo $message;
return '';
}
/**
* @return bool
*/
public function getId()
{
return $this->id;
}
/**
* @param integer $key
* @param mixed $value
* @return $this
*/
private function option($key, $value)
{
$this->options[$key] = $value;
return $this;
}
/**
* @return $this
*/
public function persistent()
{
$this->_persistent = true;
return $this;
}
/**
* @return bool
*/
public function isPersistent()
{
return $this->_persistent;
}
/**
* @param int $sec
* @return $this
*/
public function keepAlive($sec = 60)
{
$this->options[CURLOPT_FORBID_REUSE] = TRUE;
$this->headers['Connection'] = 'Keep-Alive';
$this->headers['Keep-Alive'] = $sec;
return $this;
}
/**
* @param bool $flag
* @return $this
*/
public function verbose($flag = true)
{
$this->options[CURLOPT_VERBOSE] = $flag;
return $this;
}
/**
* @param string $key
* @param string $value
* @return $this
*/
public function header($key, $value)
{
$this->headers[$key] = $value;
return $this;
}
/**
* @return array
*/
public function getHeaders()
{
$head = [];
foreach ($this->headers as $key=>$value) {
$head[] = sprintf("%s: %s", $key, $value);
}
return $head;
}
/**
* @param string $url
* @return $this
*/
public function url($url)
{
$this->url = $url;
return $this;
}
/**
* @return mixed
*/
public function getUrl()
{
return $this->url;
}
/**
* @param string $id
* @return string
*/
public function getUniqHash($id)
{
return $id . '.' . microtime() . mt_rand(0, 1000000);
}
/**
* @param bool $flag
*/
public function httpCompression($flag)
{
if ($flag) {
$this->_httpCompression = $flag;
$this->options[CURLOPT_ENCODING] = 'gzip';
} else
{
$this->_httpCompression = false;
unset($this->options[CURLOPT_ENCODING]);
}
}
/**
* @param string $username
* @param string $password
* @return $this
*/
public function auth($username, $password)
{
$this->options[CURLOPT_USERPWD] = sprintf("%s:%s", $username, $password);
return $this;
}
/**
* @param array|string $data
* @return $this
*/
public function parameters($data)
{
$this->parameters = $data;
return $this;
}
/**
* The number of seconds to wait when trying to connect. Use 0 for infinite waiting.
*
* @param int $seconds
* @return $this
*/
public function connectTimeOut($seconds = 1)
{
$this->options[CURLOPT_CONNECTTIMEOUT] = $seconds;
return $this;
}
/**
* The maximum number of seconds (float) allowed to execute cURL functions.
*
* @param float $seconds
* @return $this
*/
public function timeOut($seconds = 10)
{
return $this->timeOutMs(intval($seconds * 1000));
}
/**
* The maximum allowed number of milliseconds to perform cURL functions.
*
* @param int $ms millisecond
* @return $this
*/
protected function timeOutMs($ms = 10000)
{
$this->options[CURLOPT_TIMEOUT_MS] = $ms;
return $this;
}
/**
* @param array|mixed $data
* @return $this
* @throws \ClickHouseDB\Exception\TransportException
*/
public function parameters_json($data)
{
$this->header("Content-Type", "application/json, text/javascript; charset=utf-8");
$this->header("Accept", "application/json, text/javascript, */*; q=0.01");
if ($data === null) {
$this->parameters = '{}';
return $this;
}
if (is_string($data)) {
$this->parameters = $data;
return $this;
}
$this->parameters = json_encode($data);
if (!$this->parameters && $data) {
throw new \ClickHouseDB\Exception\TransportException('Cant json_encode: ' . strval($data));
}
return $this;
}
/**
* @return resource
*/
public function getResultFileHandle()
{
return $this->resultFileHandle;
}
/**
* @return bool
*/
public function isResultFile()
{
return ($this->resultFileHandle ? true : false);
}
/**
* @param resource $h resource
* @param bool $zlib
* @return $this
*/
public function setResultFileHandle($h, $zlib = false)
{
$this->resultFileHandle = $h;
if ($zlib) {
$params = array('level' => 6, 'window' => 15, 'memory' => 9);
stream_filter_append($this->resultFileHandle, 'zlib.deflate', STREAM_FILTER_WRITE, $params);
}
return $this;
}
/**
* @return CurlerRequest
*/
public function PUT()
{
return $this->execute('PUT');
}
/**
* @return CurlerRequest
*/
public function POST()
{
return $this->execute('POST');
}
/**
* @return CurlerRequest
*/
public function OPTIONS()
{
return $this->execute('OPTIONS');
}
/**
* @return CurlerRequest
*/
public function GET()
{
return $this->execute('GET');
}
/**
* The number of seconds that DNS records are stored in memory. By default this parameter is 120 (2 minutes).
*
* @param integer $set
* @return $this
*/
public function setDnsCache($set)
{
$this->_dns_cache = $set;
return $this;
}
/**
* The number of seconds that DNS records are stored in memory. By default this parameter is 120 (2 minutes).
*
* @return int
*/
public function getDnsCache()
{
return $this->_dns_cache;
}
/**
* @param string $method
* @return $this
*/
private function execute($method)
{
$this->method = $method;
return $this;
}
/**
* @return CurlerResponse
* @throws \ClickHouseDB\Exception\TransportException
*/
public function response()
{
if (!$this->resp) {
throw new \ClickHouseDB\Exception\TransportException('Can\'t fetch response: it is empty');
}
return $this->resp;
}
/**
* @return bool
*/
public function isResponseExists()
{
return ($this->resp ? true : false);
}
public function setResponse(CurlerResponse $response)
{
$this->resp = $response;
}
/**
* @return mixed
*/
public function handle()
{
$this->prepareRequest();
return $this->handle;
}
/**
* @param callable $callback
* @throws \Exception
*/
public function setFunctionProgress(callable $callback)
{
if (!is_callable($callback)) {
throw new \Exception('setFunctionProgress not is_callable');
}
$this->option(CURLOPT_NOPROGRESS, false);
$this->option(CURLOPT_PROGRESSFUNCTION, $callback); // callable form available since PHP 5.5.0
}
/**
* @return bool
*/
private function prepareRequest()
{
if (!$this->handle) {
$this->handle = curl_init();
}
$curl_opt = $this->options;
$method = $this->method;
if ($this->_attachFiles) {
$curl_opt[CURLOPT_SAFE_UPLOAD] = true;
}
if (strtoupper($method) == 'GET') {
$curl_opt[CURLOPT_HTTPGET] = TRUE;
$curl_opt[CURLOPT_CUSTOMREQUEST] = strtoupper($method);
$curl_opt[CURLOPT_POSTFIELDS] = false;
} else {
if (strtoupper($method) === 'POST') {
$curl_opt[CURLOPT_POST] = TRUE;
}
$curl_opt[CURLOPT_CUSTOMREQUEST] = strtoupper($method);
if ($this->parameters) {
$curl_opt[CURLOPT_POSTFIELDS] = $this->parameters;
if (!is_array($this->parameters)) {
$this->header('Content-Length', strlen($this->parameters));
}
}
}
// CURLOPT_DNS_CACHE_TIMEOUT - the number of seconds DNS records are kept in memory.
$curl_opt[CURLOPT_DNS_CACHE_TIMEOUT] = $this->getDnsCache();
$curl_opt[CURLOPT_URL] = $this->url;
if (!empty($this->headers) && sizeof($this->headers)) {
$curl_opt[CURLOPT_HTTPHEADER] = array();
foreach ($this->headers as $key => $value) {
$curl_opt[CURLOPT_HTTPHEADER][] = sprintf("%s: %s", $key, $value);
}
}
if (!empty($curl_opt[CURLOPT_INFILE])) {
$curl_opt[CURLOPT_PUT] = true;
}
if (!empty($curl_opt[CURLOPT_WRITEFUNCTION]))
{
$curl_opt[CURLOPT_HEADER]=false;
}
if ($this->resultFileHandle) {
$curl_opt[CURLOPT_FILE] = $this->resultFileHandle;
$curl_opt[CURLOPT_HEADER] = false;
}
if ($this->options[CURLOPT_VERBOSE]) {
echo "\n-----------BODY REQUEST----------\n" . $curl_opt[CURLOPT_POSTFIELDS] . "\n------END--------\n";
}
curl_setopt_array($this->handle, $curl_opt);
return true;
}
}
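A minimal usage sketch for CurlerRequest, mirroring what Transport\Http::ping() does internally; the endpoint URL below is an assumption, and in the library these requests are normally assembled by the Http transport rather than by hand:
<?php
// Build and run one GET request against an assumed local ClickHouse HTTP endpoint.
$request = new \ClickHouseDB\Transport\CurlerRequest();
$request->url('http://127.0.0.1:8123/?query=' . urlencode('SELECT 1'))
    ->verbose(false)
    ->GET()
    ->connectTimeOut(5);
(new \ClickHouseDB\Transport\CurlerRolling())->execOne($request);
echo $request->response()->body(); // "1\n" on success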

View File

@ -0,0 +1,302 @@
<?php
namespace ClickHouseDB\Transport;
class CurlerResponse
{
/**
* @var mixed
*/
public $_headers;
/**
* @var mixed
*/
public $_info;
/**
* @var mixed
*/
public $_error;
/**
* @var int
*/
public $_errorNo = 0;
/**
* @var float
*/
public $_useTime;
/**
* @var string
*/
public $_body;
/**
* Response constructor.
*/
public function __construct() {}
/**
* @return int
*/
public function error_no()
{
return $this->_errorNo;
}
/**
* @return mixed
*/
public function error()
{
return $this->_error;
}
/**
* @return mixed
*/
public function url()
{
return $this->_info['url'];
}
/**
* @return mixed
*/
public function total_time()
{
return round($this->_info['total_time'], 3);
}
/**
* @return string
*/
public function starttransfer_time()
{
return round($this->_info['starttransfer_time'], 3);
}
/**
* @return string
*/
public function connect_time()
{
return round($this->_info['connect_time'], 3);
}
/**
* @return string
*/
public function pretransfer_time()
{
return round($this->_info['pretransfer_time'], 3);
}
/**
* @return mixed
*/
public function content_type()
{
return $this->_info['content_type'];
}
/**
* @return mixed
*/
public function http_code()
{
return $this->_info['http_code'];
}
/**
* @param string $name
* @return null|string
*/
public function headers($name)
{
if (isset($this->_headers[$name])) {
return $this->_headers[$name];
}
return null;
}
/**
* @return null|string
*/
public function connection()
{
return $this->headers('Connection');
}
/**
* @return mixed
*/
public function body()
{
return $this->_body;
}
/**
* @return mixed
*/
public function as_string()
{
return $this->body();
}
/**
*
*/
public function dump_json()
{
print_r($this->json());
}
/**
* @param bool $result
* @return string
*/
public function dump($result = false)
{
$msg = "\n--------------------------- Response -------------------------------------\nBODY:\n";
$msg .= print_r($this->_body, true);
$msg .= "\nHEAD:\n";
$msg .= print_r($this->_headers, true);
$msg .= "\nERROR:\n" . $this->error();
$msg .= "\nINFO:\n";
$msg .= json_encode($this->_info);
$msg .= "\n----------------------------------------------------------------------\n";
if ($result) {
return $msg;
}
echo $msg;
}
/**
* @param int $size
* @param string $unit
* @return string
*/
private function humanFileSize($size, $unit = '')
{
if ((!$unit && $size >= 1 << 30) || $unit == 'GB') {
return number_format($size / (1 << 30), 2) . ' GB';
}
if ((!$unit && $size >= 1 << 20) || $unit == 'MB') {
return number_format($size / (1 << 20), 2) . ' MB';
}
if ((!$unit && $size >= 1 << 10) || $unit == 'KB') {
return number_format($size / (1 << 10), 2) . ' KB';
}
return number_format($size) . ' bytes';
}
/**
* @return string
*/
public function upload_content_length()
{
return $this->humanFileSize($this->_info['upload_content_length']);
}
/**
* @return string
*/
public function speed_upload()
{
$SPEED_UPLOAD = $this->_info['speed_upload'];
return round(($SPEED_UPLOAD * 8) / (1000 * 1000), 2) . ' Mbps';
}
/**
* @return string
*/
public function speed_download()
{
$SPEED_UPLOAD = $this->_info['speed_download'];
return round(($SPEED_UPLOAD * 8) / (1000 * 1000), 2) . ' Mbps';
}
/**
* @return string
*/
public function size_upload()
{
return $this->humanFileSize($this->_info['size_upload']);
}
/**
* @return string
*/
public function request_size()
{
return $this->humanFileSize($this->_info['request_size']);
}
/**
* @return string
*/
public function header_size()
{
return $this->humanFileSize($this->_info['header_size']);
}
/**
* @return string
*/
public function size_download()
{
return $this->humanFileSize($this->_info['size_download']);
}
/**
* @return mixed
*/
public function info()
{
return $this->_info;
}
/**
* @param string|null $key
* @return bool|mixed
*/
public function json($key = null)
{
$d = json_decode($this->body(), true);
if (!$key) {
return $d;
}
if (!isset($d[$key])) {
return false;
}
return $d[$key];
}
/**
* @return mixed
*/
public function rawDataOrJson($format)
{
// JSONCompact // JSONEachRow
if (stripos($format, 'json') !== false)
{
if (stripos($format,'JSONEachRow')===false)
return $this->json();
}
return $this->body();
}
}
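A short sketch of inspecting a CurlerResponse, assuming $request is a CurlerRequest that has already been executed through CurlerRolling::execOne():
<?php
$response = $request->response();       // throws TransportException if nothing was fetched
if ($response->http_code() == 200) {
    $data = $response->json('data');    // decoded body, or false when the key is missing
    echo $response->total_time() . 's, ' . $response->size_download() . " received\n";
} else {
    echo $response->dump(true);         // body, headers, error and curl info as one string
}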

View File

@ -0,0 +1,387 @@
<?php
namespace ClickHouseDB\Transport;
use ClickHouseDB\Exception\TransportException;
class CurlerRolling
{
/**
* @var int
*
* Max number of simultaneous requests.
*/
private $simultaneousLimit = 10;
/**
* @var array
*
* Requests currently being processed by curl
*/
private $activeRequests = [];
/**
* @var int
*/
private $runningRequests = 0;
/**
* @var CurlerRequest[]
*
* Requests queued to be processed
*/
private $pendingRequests = [];
/**
* @var int
*/
private $completedRequestCount = 0;
/**
* @var null|resource
*/
private $_pool_master = null;
/**
* @var int
*/
private $waitRequests = 0;
/**
* @var array
*/
private $handleMapTasks = [];
/**
*
*/
public function __destruct()
{
$this->close();
}
/**
* @return resource
*/
private function handlerMulti()
{
if (!$this->_pool_master) {
$this->_pool_master = curl_multi_init();
if (function_exists('curl_multi_setopt')) {
curl_multi_setopt($this->_pool_master, CURLMOPT_MAXCONNECTS, $this->simultaneousLimit);
}
}
return $this->_pool_master;
}
/**
*
*/
public function close()
{
if ($this->_pool_master) {
curl_multi_close($this->handlerMulti());
}
}
/**
* @param CurlerRequest $req
* @param bool $checkMultiAdd
* @param bool $force
* @return bool
* @throws TransportException
*/
public function addQueLoop(CurlerRequest $req, $checkMultiAdd = true, $force = false)
{
$id = $req->getId();
if (!$id) {
$id = $req->getUniqHash($this->completedRequestCount);
}
if (!$force && isset($this->pendingRequests[$id])) {
if (!$checkMultiAdd) {
return false;
}
throw new TransportException("Cant add exists que - cant overwrite : $id!\n");
}
$this->pendingRequests[$id] = $req;
return true;
}
/**
* @param resource $oneHandle
* @return CurlerResponse
*/
private function makeResponse($oneHandle)
{
$response = curl_multi_getcontent($oneHandle);
$header_size = curl_getinfo($oneHandle, CURLINFO_HEADER_SIZE);
$header = substr($response, 0, $header_size);
$body = substr($response, $header_size);
$n = new CurlerResponse();
$n->_headers = $this->parse_headers_from_curl_response($header);
$n->_body = $body;
$n->_info = curl_getinfo($oneHandle);
$n->_error = curl_error($oneHandle);
$n->_errorNo = curl_errno($oneHandle);
$n->_useTime = 0;
return $n;
}
/**
* @return bool
* @throws TransportException
*/
public function execLoopWait()
{
$c = 0;
$count=0;
// add all tasks
do {
$this->exec();
$loop = $this->countActive();
$pend = $this->countPending();
$count=$loop+$pend;
$c++;
if ($c > 20000) {
break;
}
usleep(500);
} while ($count);
return true;
}
/**
* @param string $response
* @return array
*/
private function parse_headers_from_curl_response($response)
{
$headers = [];
$header_text = $response;
foreach (explode("\r\n", $header_text) as $i => $line) {
if ($i === 0) {
$headers['http_code'] = $line;
} else {
$r = explode(': ', $line);
if (sizeof($r) == 2) {
$headers[$r[0]] = $r[1];
}
}
}
return $headers;
}
/**
* @return int
*/
public function countPending()
{
return sizeof($this->pendingRequests);
}
/**
* @return int
*/
public function countActive()
{
return count($this->activeRequests);
}
/**
* @return int
*/
public function countCompleted()
{
return $this->completedRequestCount;
}
/**
* Set the limit for how many cURL requests will be executed simultaneously.
*
* Please be mindful that if you set this too high, requests are likely to fail
* more frequently, or automated software may perceive you as mounting a DoS attack
* and automatically block further requests.
*
* @param int $count
* @throws \InvalidArgumentException
* @return $this
*/
public function setSimultaneousLimit($count)
{
if (!is_int($count) || $count < 2) {
throw new \InvalidArgumentException("setSimultaneousLimit count must be an int >= 2");
}
$this->simultaneousLimit = $count;
return $this;
}
/**
* @return int
*/
public function getSimultaneousLimit()
{
return $this->simultaneousLimit;
}
/**
* @return int
*/
public function getRunningRequests()
{
return $this->runningRequests;
}
/**
* @param CurlerRequest $request
* @param bool $auto_close
* @return mixed
* @throws TransportException
*/
public function execOne(CurlerRequest $request, $auto_close = false)
{
$h = $request->handle();
curl_exec($h);
$request->setResponse($this->makeResponse($h));
if ($auto_close) {
$request->close();
}
return $request->response()->http_code();
}
/**
* @return string
*/
public function getInfo()
{
return "runningRequests = {$this->runningRequests} , pending=" . sizeof($this->pendingRequests) . " ";
}
/**
* @throws TransportException
*/
public function exec()
{
$this->makePendingRequestsQue();
// ensure we're running
// a request was just completed -- find out which one
while (($execrun = curl_multi_exec($this->handlerMulti(), $running)) == CURLM_CALL_MULTI_PERFORM);
if ($execrun != CURLM_OK) {
throw new TransportException("[ NOT CURLM_OK ]");
}
$this->runningRequests = $running;
while ($done = curl_multi_info_read($this->handlerMulti())) {
$response = $this->makeResponse($done['handle']);
// send the return values to the callback function.
$key = (string) $done['handle'];
$task_id = $this->handleMapTasks[$key];
$request = $this->pendingRequests[$this->handleMapTasks[$key]];
unset($this->handleMapTasks[$key]);
unset($this->activeRequests[$task_id]);
$this->pendingRequests[$task_id]->setResponse($response);
$this->pendingRequests[$task_id]->onCallback();
if (!$request->isPersistent()) {
unset($this->pendingRequests[$task_id]);
}
$this->completedRequestCount++;
// remove the curl handle that just completed
curl_multi_remove_handle($this->handlerMulti(), $done['handle']);
// if something was requeued, this will get it running/update our loop check values
$status = curl_multi_exec($this->handlerMulti(), $active);
}
// see if there is anything to read
curl_multi_select($this->handlerMulti(), 0.01);
return $this->countActive();
}
public function makePendingRequestsQue()
{
$max = $this->getSimultaneousLimit();
$active = $this->countActive();
if ($active < $max) {
$canAdd = $max - $active;
// $pending = sizeof($this->pendingRequests);
$add = [];
foreach ($this->pendingRequests as $task_id => $params) {
if (empty($this->activeRequests[$task_id])) {
$add[$task_id] = $task_id;
}
}
if (sizeof($add)) {
if ($canAdd >= sizeof($add)) {
$ll = $add;
} else {
$ll = array_rand($add, $canAdd);
if (!is_array($ll)) {
$ll = array($ll => $ll);
}
}
foreach ($ll as $task_id) {
$this->_prepareLoopQue($task_id);
}
}// if add
}// if can add
}
/**
* @param string $task_id
*/
private function _prepareLoopQue($task_id)
{
$this->activeRequests[$task_id] = 1;
$this->waitRequests++;
$h = $this->pendingRequests[$task_id]->handle();
// pool
curl_multi_add_handle($this->handlerMulti(), $h);
$key = (string) $h;
$this->handleMapTasks[$key] = $task_id;
}
}
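A sketch of the rolling queue on its own, assuming $requestA and $requestB are prepared CurlerRequest objects; inside the library the queue is normally fed by Transport\Http:
<?php
$rolling = new \ClickHouseDB\Transport\CurlerRolling();
$rolling->setSimultaneousLimit(5);    // at most 5 handles in flight; must be an int >= 2
$rolling->addQueLoop($requestA);
$rolling->addQueLoop($requestB);
$rolling->execLoopWait();             // pump curl_multi until no active or pending requests remain
echo $rolling->countCompleted() . " requests completed\n";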

View File

@ -0,0 +1,709 @@
<?php
namespace ClickHouseDB\Transport;
use ClickHouseDB\Exception\TransportException;
use ClickHouseDB\Query\Degeneration;
use ClickHouseDB\Query\Query;
use ClickHouseDB\Query\WhereInFile;
use ClickHouseDB\Query\WriteToFile;
use ClickHouseDB\Settings;
use ClickHouseDB\Statement;
use const PHP_EOL;
class Http
{
/**
* @var string
*/
private $_username = null;
/**
* @var string
*/
private $_password = null;
/**
* @var string
*/
private $_host = '';
/**
* @var int
*/
private $_port = 0;
/**
* @var bool|int
*/
private $_verbose = false;
/**
* @var CurlerRolling
*/
private $_curler = null;
/**
* @var Settings
*/
private $_settings = null;
/**
* @var array
*/
private $_query_degenerations = [];
/**
* Count seconds (int)
*
* @var int
*/
private $_connectTimeOut = 5;
/**
* @var callable
*/
private $xClickHouseProgress = null;
/**
* Http constructor.
* @param string $host
* @param int $port
* @param string $username
* @param string $password
*/
public function __construct($host, $port, $username, $password)
{
$this->setHost($host, $port);
$this->_username = $username;
$this->_password = $password;
$this->_settings = new Settings($this);
$this->setCurler();
}
public function setCurler()
{
$this->_curler = new CurlerRolling();
}
/**
* @return CurlerRolling
*/
public function getCurler()
{
return $this->_curler;
}
/**
* @param string $host
* @param int $port
*/
public function setHost($host, $port = -1)
{
if ($port > 0) {
$this->_port = $port;
}
$this->_host = $host;
}
/**
* @return string
*/
public function getUri()
{
$proto = 'http';
if ($this->settings()->isHttps()) {
$proto = 'https';
}
$uri = $proto . '://' . $this->_host;
if (stripos($this->_host,'/')!==false || stripos($this->_host,':')!==false) {
return $uri;
}
if (intval($this->_port)>0) {
return $uri . ':' . $this->_port;
}
return $uri;
}
/**
* @return Settings
*/
public function settings()
{
return $this->_settings;
}
/**
* @param bool|int $flag
* @return mixed
*/
public function verbose($flag)
{
$this->_verbose = $flag;
return $flag;
}
/**
* @param array $params
* @return string
*/
private function getUrl($params = [])
{
$settings = $this->settings()->getSettings();
if (is_array($params) && sizeof($params)) {
$settings = array_merge($settings, $params);
}
if ($this->settings()->isReadOnlyUser())
{
unset($settings['extremes']);
unset($settings['readonly']);
unset($settings['enable_http_compression']);
unset($settings['max_execution_time']);
}
unset($settings['https']);
return $this->getUri() . '?' . http_build_query($settings);
}
/**
* @param array $extendinfo
* @return CurlerRequest
*/
private function newRequest($extendinfo)
{
$new = new CurlerRequest();
$new->auth($this->_username, $this->_password)
->POST()
->setRequestExtendedInfo($extendinfo);
if ($this->settings()->isEnableHttpCompression()) {
$new->httpCompression(true);
}
if ($this->settings()->getSessionId())
{
$new->persistent();
}
$new->timeOut($this->settings()->getTimeOut());
$new->connectTimeOut($this->_connectTimeOut)->keepAlive(); // connect timeout in seconds
$new->verbose(boolval($this->_verbose));
return $new;
}
/**
* @param Query $query
* @param array $urlParams
* @param bool $query_as_string
* @return CurlerRequest
* @throws \ClickHouseDB\Exception\TransportException
*/
private function makeRequest(Query $query, $urlParams = [], $query_as_string = false)
{
$sql = $query->toSql();
if ($query_as_string) {
$urlParams['query'] = $sql;
}
$url = $this->getUrl($urlParams);
$extendinfo = [
'sql' => $sql,
'query' => $query,
'format'=> $query->getFormat()
];
$new = $this->newRequest($extendinfo);
$new->url($url);
if (!$query_as_string) {
$new->parameters_json($sql);
}
if ($this->settings()->isEnableHttpCompression()) {
$new->httpCompression(true);
}
return $new;
}
/**
* @param string|Query $sql
* @return CurlerRequest
*/
public function writeStreamData($sql)
{
if ($sql instanceof Query) {
$query=$sql;
} else {
$query = new Query($sql);
}
$url = $this->getUrl([
'readonly' => 0,
'query' => $query->toSql()
]);
$extendinfo = [
'sql' => $sql,
'query' => $query,
'format'=> $query->getFormat()
];
$request = $this->newRequest($extendinfo);
$request->url($url);
return $request;
}
/**
* @param string $sql
* @param string $file_name
* @return Statement
* @throws \ClickHouseDB\Exception\TransportException
*/
public function writeAsyncCSV($sql, $file_name)
{
$query = new Query($sql);
$url = $this->getUrl([
'readonly' => 0,
'query' => $query->toSql()
]);
$extendinfo = [
'sql' => $sql,
'query' => $query,
'format'=> $query->getFormat()
];
$request = $this->newRequest($extendinfo);
$request->url($url);
$request->setCallbackFunction(function(CurlerRequest $request) {
$handle = $request->getInfileHandle();
if (is_resource($handle)) {
fclose($handle);
}
});
$request->setInfile($file_name);
$this->_curler->addQueLoop($request);
return new Statement($request);
}
/**
* get Count Pending Query in Queue
*
* @return int
*/
public function getCountPendingQueue()
{
return $this->_curler->countPending();
}
/**
* set Connect TimeOut in seconds [CURLOPT_CONNECTTIMEOUT] ( int )
*
* @param int $connectTimeOut
*/
public function setConnectTimeOut($connectTimeOut)
{
$this->_connectTimeOut = $connectTimeOut;
}
/**
* get ConnectTimeOut in seconds
*
* @return int
*/
public function getConnectTimeOut()
{
return $this->_connectTimeOut;
}
public function __findXClickHouseProgress($handle)
{
$code = curl_getinfo($handle, CURLINFO_HTTP_CODE);
// Search X-ClickHouse-Progress
if ($code == 200) {
$response = curl_multi_getcontent($handle);
$header_size = curl_getinfo($handle, CURLINFO_HEADER_SIZE);
if (!$header_size) {
return false;
}
$header = substr($response, 0, $header_size);
if (!$header_size) {
return false;
}
$pos = strrpos($header, 'X-ClickHouse-Progress');
if (!$pos) {
return false;
}
$last = substr($header, $pos);
$data = @json_decode(str_ireplace('X-ClickHouse-Progress:', '', $last), true);
if ($data && is_callable($this->xClickHouseProgress)) {
if (is_array($this->xClickHouseProgress)) {
call_user_func_array($this->xClickHouseProgress, [$data]);
} else {
call_user_func($this->xClickHouseProgress, $data);
}
}
}
}
/**
* @param Query $query
* @param null|WhereInFile $whereInFile
* @param null|WriteToFile $writeToFile
* @return CurlerRequest
* @throws \Exception
*/
public function getRequestRead(Query $query, $whereInFile = null, $writeToFile = null)
{
$urlParams = ['readonly' => 1];
$query_as_string = false;
// ---------------------------------------------------------------------------------
if ($whereInFile instanceof WhereInFile && $whereInFile->size()) {
// $request = $this->prepareSelectWhereIn($request, $whereInFile);
$structure = $whereInFile->fetchUrlParams();
// $structure = [];
$urlParams = array_merge($urlParams, $structure);
$query_as_string = true;
}
// ---------------------------------------------------------------------------------
// if result to file
if ($writeToFile instanceof WriteToFile && $writeToFile->fetchFormat()) {
$query->setFormat($writeToFile->fetchFormat());
unset($urlParams['extremes']);
}
// ---------------------------------------------------------------------------------
// makeRequest read
$request = $this->makeRequest($query, $urlParams, $query_as_string);
// ---------------------------------------------------------------------------------
// attach files
if ($whereInFile instanceof WhereInFile && $whereInFile->size()) {
$request->attachFiles($whereInFile->fetchFiles());
}
// ---------------------------------------------------------------------------------
// result to file
if ($writeToFile instanceof WriteToFile && $writeToFile->fetchFormat()) {
$fout = fopen($writeToFile->fetchFile(), 'w');
if (is_resource($fout)) {
$isGz = $writeToFile->getGzip();
if ($isGz) {
// write gzip header
// "\x1f\x8b\x08\x00\x00\x00\x00\x00"
// fwrite($fout, "\x1F\x8B\x08\x08".pack("V", time())."\0\xFF", 10);
// write the original file name
// $oname = str_replace("\0", "", basename($writeToFile->fetchFile()));
// fwrite($fout, $oname."\0", 1+strlen($oname));
fwrite($fout, "\x1f\x8b\x08\x00\x00\x00\x00\x00");
}
$request->setResultFileHandle($fout, $isGz)->setCallbackFunction(function(CurlerRequest $request) {
fclose($request->getResultFileHandle());
});
}
}
if ($this->xClickHouseProgress)
{
$request->setFunctionProgress([$this, '__findXClickHouseProgress']);
}
// ---------------------------------------------------------------------------------
return $request;
}
public function cleanQueryDegeneration()
{
$this->_query_degenerations = [];
return true;
}
public function addQueryDegeneration(Degeneration $degeneration)
{
$this->_query_degenerations[] = $degeneration;
return true;
}
/**
* @param Query $query
* @return CurlerRequest
* @throws \ClickHouseDB\Exception\TransportException
*/
public function getRequestWrite(Query $query)
{
$urlParams = ['readonly' => 0];
return $this->makeRequest($query, $urlParams);
}
/**
* @throws TransportException
*/
public function ping() : bool
{
$request = new CurlerRequest();
$request->url($this->getUri())->verbose(false)->GET()->connectTimeOut($this->getConnectTimeOut());
$this->_curler->execOne($request);
return $request->response()->body() === 'Ok.' . PHP_EOL;
}
/**
* @param string $sql
* @param mixed[] $bindings
* @return Query
*/
private function prepareQuery($sql, $bindings)
{
// add Degeneration query
foreach ($this->_query_degenerations as $degeneration) {
$degeneration->bindParams($bindings);
}
return new Query($sql, $this->_query_degenerations);
}
/**
* @param Query|string $sql
* @param mixed[] $bindings
* @param null|WhereInFile $whereInFile
* @param null|WriteToFile $writeToFile
* @return CurlerRequest
* @throws \Exception
*/
private function prepareSelect($sql, $bindings, $whereInFile, $writeToFile = null)
{
if ($sql instanceof Query) {
return $this->getRequestWrite($sql);
}
$query = $this->prepareQuery($sql, $bindings);
$query->setFormat('JSON');
return $this->getRequestRead($query, $whereInFile, $writeToFile);
}
/**
* @param Query|string $sql
* @param mixed[] $bindings
* @return CurlerRequest
* @throws \ClickHouseDB\Exception\TransportException
*/
private function prepareWrite($sql, $bindings = [])
{
if ($sql instanceof Query) {
return $this->getRequestWrite($sql);
}
$query = $this->prepareQuery($sql, $bindings);
return $this->getRequestWrite($query);
}
/**
* @return bool
* @throws \ClickHouseDB\Exception\TransportException
*/
public function executeAsync()
{
return $this->_curler->execLoopWait();
}
/**
* @param Query|string $sql
* @param mixed[] $bindings
* @param null|WhereInFile $whereInFile
* @param null|WriteToFile $writeToFile
* @return Statement
* @throws \ClickHouseDB\Exception\TransportException
* @throws \Exception
*/
public function select($sql, array $bindings = [], $whereInFile = null, $writeToFile = null)
{
$request = $this->prepareSelect($sql, $bindings, $whereInFile, $writeToFile);
$this->_curler->execOne($request);
return new Statement($request);
}
/**
* @param Query|string $sql
* @param mixed[] $bindings
* @param null|WhereInFile $whereInFile
* @param null|WriteToFile $writeToFile
* @return Statement
* @throws \ClickHouseDB\Exception\TransportException
* @throws \Exception
*/
public function selectAsync($sql, array $bindings = [], $whereInFile = null, $writeToFile = null)
{
$request = $this->prepareSelect($sql, $bindings, $whereInFile, $writeToFile);
$this->_curler->addQueLoop($request);
return new Statement($request);
}
/**
* @param callable $callback
*/
public function setProgressFunction(callable $callback)
{
$this->xClickHouseProgress = $callback;
}
/**
* @param string $sql
* @param mixed[] $bindings
* @param bool $exception
* @return Statement
* @throws \ClickHouseDB\Exception\TransportException
*/
public function write($sql, array $bindings = [], $exception = true)
{
$request = $this->prepareWrite($sql, $bindings);
$this->_curler->execOne($request);
$response = new Statement($request);
if ($exception) {
if ($response->isError()) {
$response->error();
}
}
return $response;
}
/**
* @param Stream $streamRW
* @param CurlerRequest $request
* @return Statement
* @throws \ClickHouseDB\Exception\TransportException
*/
private function streaming(Stream $streamRW,CurlerRequest $request)
{
$callable=$streamRW->getClosure();
$stream=$streamRW->getStream();
try {
if (!is_callable($callable)) {
if ($streamRW->isWrite())
{
$callable = function ($ch, $fd, $length) use ($stream) {
return ($line = fread($stream, $length)) ? $line : '';
};
} else {
$callable = function ($ch, $fd) use ($stream) {
return fwrite($stream, $fd);
};
}
}
if ($streamRW->isGzipHeader()) {
if ($streamRW->isWrite())
{
$request->header('Content-Encoding', 'gzip');
$request->header('Content-Type', 'application/x-www-form-urlencoded');
} else {
$request->header('Accept-Encoding', 'gzip');
}
}
$request->header('Transfer-Encoding', 'chunked');
if ($streamRW->isWrite())
{
$request->setReadFunction($callable);
} else {
$request->setWriteFunction($callable);
// $request->setHeaderFunction($callableHead);
}
$this->_curler->execOne($request,true);
$response = new Statement($request);
if ($response->isError()) {
$response->error();
}
return $response;
} finally {
if ($streamRW->isWrite())
fclose($stream);
}
}
/**
* @param Stream $streamRead
* @param string $sql
* @param mixed[] $bindings
* @return Statement
* @throws \ClickHouseDB\Exception\TransportException
*/
public function streamRead(Stream $streamRead,$sql,$bindings=[])
{
$sql=$this->prepareQuery($sql,$bindings);
$request=$this->getRequestRead($sql);
return $this->streaming($streamRead,$request);
}
/**
* @param Stream $streamWrite
* @param string $sql
* @param mixed[] $bindings
* @return Statement
* @throws \ClickHouseDB\Exception\TransportException
*/
public function streamWrite(Stream $streamWrite,$sql,$bindings=[])
{
$sql=$this->prepareQuery($sql,$bindings);
$request = $this->writeStreamData($sql);
return $this->streaming($streamWrite,$request);
}
}
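A minimal sketch of driving the Http transport directly, assuming a ClickHouse server on 127.0.0.1:8123 with the default user; normally ClickHouseDB\Client constructs and owns this object:
<?php
$http = new \ClickHouseDB\Transport\Http('127.0.0.1', 8123, 'default', '');
var_dump($http->ping());                          // true when the server answers "Ok."
$statement = $http->select('SELECT 1 AS one');    // synchronous read, wrapped in a Statement
echo $statement->fetchOne('one') . PHP_EOL;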

View File

@ -0,0 +1,16 @@
<?php
namespace ClickHouseDB\Transport;
/**
* @package ClickHouseDB\Transport
*/
interface IStream
{
public function isGzipHeader();
public function closure(callable $callable);
public function getStream();
public function getClosure();
public function isWrite();
public function applyGzip();
}

View File

@ -0,0 +1,74 @@
<?php
namespace ClickHouseDB\Transport;
/**
* Class Stream
* @package ClickHouseDB\Transport
*/
abstract class Stream implements IStream
{
/**
* @var resource
*/
private $source;
/**
* @var bool
*/
private $gzip=false;
/**
* @var null|callable
*/
private $callable=null;
/**
* @param resource $source
*/
public function __construct($source)
{
if (!is_resource($source)) {
throw new \InvalidArgumentException('Argument $source must be resource');
}
$this->source = $source;
}
/**
* @return bool
*/
public function isGzipHeader()
{
return $this->gzip;
}
/**
* @return callable|null
*/
public function getClosure()
{
return $this->callable;
}
/**
* @return resource
*/
public function getStream()
{
return $this->source;
}
/**
* @param callable $callable
*/
public function closure(callable $callable)
{
$this->callable=$callable;
}
/**
*
*/
public function enableGzipHeader()
{
$this->gzip=true;
}
}

View File

@ -0,0 +1,72 @@
<?php
namespace ClickHouseDB\Transport;
use ClickHouseDB\Statement;
/**
* Class StreamInsert
* @deprecated
* @package ClickHouseDB\Transport
*/
class StreamInsert
{
/**
* @var resource
*/
private $source;
/**
* @var CurlerRequest
*/
private $request;
/**
* @var CurlerRolling
*/
private $curlerRolling;
/**
* @param resource $source
* @param CurlerRequest $request
* @param CurlerRolling|null $curlerRolling
*/
public function __construct($source, CurlerRequest $request, $curlerRolling=null)
{
if (!is_resource($source)) {
throw new \InvalidArgumentException('Argument $source must be resource');
}
if ($curlerRolling instanceof CurlerRolling)
{
$this->curlerRolling = $curlerRolling;
} else {
$this->curlerRolling = new CurlerRolling();
}
$this->source = $source;
$this->request = $request;
}
/**
* @param callable $callback function for stream read data
* @return \ClickHouseDB\Statement
* @throws \Exception
*/
public function insert($callback)
{
try {
if (!is_callable($callback)) {
throw new \InvalidArgumentException('Argument $callback can not be called as a function');
}
//
$this->request->header('Transfer-Encoding', 'chunked');
$this->request->setReadFunction($callback);
$this->curlerRolling->execOne($this->request, true);
$statement = new Statement($this->request);
$statement->error();
return $statement;
} finally {
fclose($this->source);
}
}
}

View File

@ -0,0 +1,20 @@
<?php
namespace ClickHouseDB\Transport;
/**
* Class StreamRead
* @package ClickHouseDB\Transport
*/
class StreamRead extends Stream
{
public function isWrite()
{
return false;
}
public function applyGzip()
{
// stream_filter_append($this->source, 'zlib.deflate', STREAM_FILTER_READ, ['window' => 30]);
$this->enableGzipHeader();
}
}

View File

@ -0,0 +1,28 @@
<?php
namespace ClickHouseDB\Transport;
/**
* Class StreamWrite
* @package ClickHouseDB\Transport
*/
class StreamWrite extends Stream
{
public function __construct( $source)
{
parent::__construct($source);
}
public function isWrite()
{
return true;
}
public function applyGzip()
{
stream_filter_append($this->getStream(), 'zlib.deflate', STREAM_FILTER_READ, ['window' => 30]);
$this->enableGzipHeader();
}
}
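A sketch of streaming rows into a table through StreamWrite, assuming $http is the Http transport shown above and that default.some_table already exists with a matching Int32 column named a:
<?php
$source = fopen('php://memory', 'r+');
fwrite($source, json_encode(['a' => 1]) . PHP_EOL);
rewind($source);
$stream = new \ClickHouseDB\Transport\StreamWrite($source);
$stream->applyGzip();    // deflate the data as it is read out and send Content-Encoding: gzip
$http->streamWrite($stream, 'INSERT INTO default.some_table FORMAT JSONEachRow');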

View File

@ -0,0 +1,9 @@
<?php
declare(strict_types=1);
namespace ClickHouseDB\Type;
interface NumericType extends Type
{
}

View File

@ -0,0 +1,13 @@
<?php
declare(strict_types=1);
namespace ClickHouseDB\Type;
interface Type
{
/**
* @return mixed
*/
public function getValue();
}

View File

@ -0,0 +1,40 @@
<?php
declare(strict_types=1);
namespace ClickHouseDB\Type;
final class UInt64 implements NumericType
{
/** @var string */
public $value;
private function __construct(string $uint64Value)
{
$this->value = $uint64Value;
}
/**
* @return self
*/
public static function fromString(string $uint64Value)
{
return new self($uint64Value);
}
/**
* @return string
*/
public function getValue()
{
return $this->value;
}
/**
* @return string
*/
public function __toString()
{
return $this->value;
}
}
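UInt64 simply carries the value as a string, so 64-bit unsigned numbers survive PHP's signed native integers; a tiny sketch:
<?php
use ClickHouseDB\Type\UInt64;

$max = UInt64::fromString('18446744073709551615');
echo $max->getValue();   // '18446744073709551615', safe to embed verbatim in an INSERT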

View File

@ -0,0 +1,40 @@
<?php
namespace ClickHouseDB\Tests;
use ClickHouseDB\Exception\QueryException;
use PHPUnit\Framework\TestCase;
/**
* Class AsyncSelectTest
* @group AsyncSelect
* @package ClickHouseDB\Tests
*/
final class AsyncSelectTest extends TestCase
{
use WithClient;
public function testselectAsyncFail()
{
$counter=rand(150,400);
$list=[];
for ($f=0;$f<$counter;$f++)
{
$list[$f]=$this->client->selectAsync('SELECT {num} as num',['num'=>$f]);
}
$this->client->executeAsync();
for ($f=0;$f<$counter;$f++)
{
$ResultInt=0;
try {
$ResultInt=$list[$f]->fetchOne('num');
} catch (\Exception $E)
{
}
$this->assertEquals($f, $ResultInt);
}
}
}

View File

@ -0,0 +1,222 @@
<?php
namespace ClickHouseDB\Tests;
use ClickHouseDB\Exception\UnsupportedValueType;
use ClickHouseDB\Query\Degeneration\Bindings;
use DateTimeImmutable;
use PHPUnit\Framework\TestCase;
use function curl_init;
/**
* @group BindingsTest
*/
final class BindingsTest extends TestCase
{
use WithClient;
/**
* @return array
*/
public function escapeDataProvider()
{
return [
[
'select * from test. WHERE id = :id',
['id' => 1],
'select * from test. WHERE id = 1',
],
[
'select * from test. WHERE id = :id',
['id' => '1'],
"select * from test. WHERE id = '1'",
],
[
'select * from test. WHERE date_column = :dateParam',
['dateParam' => new DateTimeImmutable('2018-08-31 23:54:02')],
"select * from test. WHERE date_column = '2018-08-31 23:54:02'",
],
[
'select * from test. WHERE a_column = :objectWithToString',
[
'objectWithToString' => new class() {
/**
* @return string
*/
public function __toString()
{
return 'expectedValue';
}
},
],
"select * from test. WHERE a_column = 'expectedValue'",
],
[
'select * from test. WHERE id IN (:id)',
['id' => [1, 2]],
'select * from test. WHERE id IN (1,2)',
],
[
'select * from test. WHERE id IN (:id)',
['id' => ["1", "2"]],
'select * from test. WHERE id IN (\'1\',\'2\')',
],
[
'select * from test. WHERE id IN (:id)',
['id' => ["1", 222,333]],
'select * from test. WHERE id IN (\'1\',222,333)',
],
[
'select * from test. WHERE id IN (:id)',
['id' => ['1', "2') OR ('1'='1"]],
"select * from test. WHERE id IN ('1','2\') OR (\'1\'=\'1')",
],
[
'select * from test. WHERE id = :id',
['id' => "2' OR (1=1)"],
"select * from test. WHERE id = '2\' OR (1=1)'",
],
];
}
public function testBindselectAsync()
{
// https://github.com/bcit-ci/CodeIgniter/blob/develop/system/database/DB_driver.php#L920
$a=$this->client->selectAsync("SELECT :a, :a2", [
"a" => "a",
"a2" => "a2"
]);
$this->assertEquals("SELECT 'a', 'a2' FORMAT JSON",$a->sql());
$a=$this->client->selectAsync("SELECT :a, :a2", [
"a1" => "x",
"a2" => "x"
]);
$this->assertEquals("SELECT :a, 'x' FORMAT JSON",$a->sql());
$a=$this->client->selectAsync("SELECT {a}, {b}", [
"a" => ":b",
"b" => ":B"
]);
$this->assertEquals("SELECT ':B', :B FORMAT JSON",$a->sql());
$a=$this->client->selectAsync("SELECT {a}, {b}", [
"a" => ":b",
"b" => ":B"
]);
$this->assertEquals("SELECT ':B', :B FORMAT JSON",$a->sql());
$arr=[
'a'=>'[A]',
'b'=>'[B]',
'c'=>'[C]',
'aa'=>'[AA]',
'bb'=>'[BB]',
'a1'=>'[A1]',
'a2'=>'[A2]',
'a3'=>'[A3]',
'a11'=>'[A11]',
'a23'=>'[A23]',
'A23'=>'[-23]',
'a5'=>'[a5]',
'arra'=>[1,2,3,4],
];
$a=$this->client->selectAsync(":a :b :c :aa :bb :cc ", $arr);
$this->assertEquals("'[A]' '[B]' '[C]' '[AA]' '[BB]' :cc FORMAT JSON",$a->sql());
$a=$this->client->selectAsync(":a1 :a2 :a3 :a11 :a23 :a5 :arra", $arr);
$this->assertEquals("'[A1]' '[A2]' '[A3]' '[A11]' '[A23]' '[a5]' 1,2,3,4 FORMAT JSON",$a->sql());
$a=$this->client->selectAsync("{a1} {a2} {a3} {a11} {a23} {a5} {arra}", $arr);
$this->assertEquals("[A1] [A2] [A3] [A11] [A23] [a5] 1, 2, 3, 4 FORMAT JSON",$a->sql());
$keys=[
'key1'=>1,
'key111'=>111,
'key11'=>11,
'key123' => 123,
];
$this->assertEquals(
'123=123 , 11=11, 111=111, 1=1, 1= 1, 123=123 FORMAT JSON',
$this->client->selectAsync('123=:key123 , 11={key11}, 111={key111}, 1={key1}, 1= :key1, 123=:key123', $keys)->sql()
);
$keys=[
'A'=>'{B}',
'B'=>':C',
'C'=>123,
'Z'=>[':C',':B',':C']
];
$this->assertEquals(
'123 \':C\',\':B\',\':C\' FORMAT JSON',
$this->client->selectAsync('{A} :Z', $keys)->sql()
);
}
/**
* @param string $sql Given SQL
* @param array $params Params
* @param string $expectedSql Expected SQL
* @dataProvider escapeDataProvider
*/
public function testEscape($sql, $params, $expectedSql)
{
$bindings = new Bindings();
$bindings->bindParams($params);
$sql = $bindings->process($sql);
$this->assertSame($expectedSql, $sql);
}
/**
* @return void
*/
public function testEscapeFail()
{
$this->expectException(UnsupportedValueType::class);
$bindings = new Bindings();
$bindings->bindParams(['unsupportedParam' => curl_init()]);
$bindings->process('SELECT * FROM test WHERE id = :unsupportedParam');
}
public function testSelectAsKeys()
{
// chr(0....255);
$this->client->settings()->set('max_block_size', 100);
$bind['k1']=1;
$bind['k2']=2;
$select=[];
for($z=0;$z<200;$z++)
{
$bind['k'.$z]=chr($z);
$select[]=":k{$z} as k{$z}";
}
$rows=$this->client->select("SELECT ".implode(",\n",$select),$bind)->rows();
$this->assertNotEmpty($rows);
$row=$rows[0];
for($z=10;$z<100;$z++) {
$this->assertArrayHasKey('k'.$z,$row);
$this->assertEquals(chr($z),$row['k'.$z]);
}
}
}

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,66 @@
<?php
namespace ClickHouseDB\Tests;
use PHPUnit\Framework\TestCase;
/**
* Class FormatQueryTest
* @package ClickHouseDB\Tests
* @group FormatQueryTest
*/
final class FormatQueryTest extends TestCase
{
use WithClient;
/**
* @throws Exception
*/
public function setUp()
{
date_default_timezone_set('Europe/Moscow');
$this->client->ping();
}
public function testCreateTableTEMPORARYNoSession()
{
$query="SELECT 2*number as FORMAT FROM system.numbers LIMIT 1,1 format TSV";
$st = $this->client->select($query);
$this->assertEquals($query, $st->sql());
$this->assertEquals('TSV', $st->getFormat());
$this->assertEquals("2\n", $st->rawData());
$query="SELECT number as format_id FROM system.numbers LIMIT 3 FORMAT CSVWithNames";
$st = $this->client->select($query);
$this->assertEquals($query, $st->sql());
$this->assertEquals('CSVWithNames', $st->getFormat());
$this->assertEquals("\"format_id\"\n0\n1\n2\n", $st->rawData());
$query="SELECT number as format_id FROM system.numbers LIMIT 1,1 FORMAT CSV";
$st = $this->client->select($query);
$this->assertEquals($query, $st->sql());
$this->assertEquals('CSV', $st->getFormat());
}
public function testClientTimeoutSettings()
{
$this->client->database('default');
$timeout = 1.5;
$this->client->setTimeout($timeout); // 1500 ms
$this->assertSame($timeout, $this->client->getTimeout());
$timeout = 10.0;
$this->client->setTimeout($timeout); // 10 seconds
$this->assertSame($timeout, $this->client->getTimeout());
$timeout = 5.0;
$this->client->setConnectTimeOut($timeout); // 5 seconds
$this->assertSame($timeout, $this->client->getConnectTimeOut());
}
}

View File

@ -0,0 +1,65 @@
<?php
namespace ClickHouseDB\Tests;
use ClickHouseDB\Exception\QueryException;
use PHPUnit\Framework\TestCase;
/**
* Class InsertAssocTest
* @group InsertAssocTest
* @package ClickHouseDB\Tests
*/
final class InsertAssocTest extends TestCase
{
use WithClient;
public function testPrepareOneRow()
{
$toInsert = [
'one' => 1,
'two' => 2,
'thr' => 3,
];
$exceptColumns = ['one','two','thr'];
$exceptValues = [[1,2,3]];
list($actualColumns, $actualValues) = $this->client->prepareInsertAssocBulk($toInsert);
$this->assertEquals($exceptValues, $actualValues);
$this->assertEquals($exceptColumns, $actualColumns);
}
public function testPrepareManyRowSuccess()
{
$oneRow = [
'one' => 1,
'two' => 2,
'thr' => 3,
];
$toInsert = [$oneRow, $oneRow, $oneRow];
$exceptColumns = ['one','two','thr'];
$exceptValues = [[1,2,3],[1,2,3],[1,2,3]];
list($actualColumns, $actualValues) = $this->client->prepareInsertAssocBulk($toInsert);
$this->assertEquals($exceptValues, $actualValues);
$this->assertEquals($exceptColumns, $actualColumns);
}
public function testPrepareManyRowFail()
{
$oneRow = [
'one' => 1,
'two' => 2,
'thr' => 3,
];
$failRow = [
'two' => 2,
'one' => 1,
'thr' => 3,
];
$toInsert = [$oneRow, $oneRow, $failRow];
$this->expectException(QueryException::class);
$this->expectExceptionMessage("Fields not match: two,one,thr and one,two,thr on element 2");
list($_, $__) = $this->client->prepareInsertAssocBulk($toInsert);
}
}

View File

@ -0,0 +1,40 @@
<?php
namespace ClickHouseDB\Tests;
use ClickHouseDB\Exception\QueryException;
use PHPUnit\Framework\TestCase;
/**
* Class JsonTest
* @group Json
* @package ClickHouseDB\Tests
*/
final class JsonTest extends TestCase
{
use WithClient;
public function testJSONEachRow()
{
$state=$this->client->select('SELECT sin(number) as sin,cos(number) as cos FROM {table_name} LIMIT 2 FORMAT JSONEachRow', ['table_name'=>'system.numbers']);
$checkString='{"sin":0,"cos":1}';
$this->assertContains($checkString,$state->rawData());
$state=$this->client->select('SELECT round(4+sin(number),2) as sin,round(4+cos(number),2) as cos FROM {table_name} LIMIT 2 FORMAT JSONCompact', ['table_name'=>'system.numbers']);
$re=[
[[4,5]],
[[4.84,4.54]]
];
// print_r($state->rows());
// print_r($re);
// die();
$this->assertEquals($re,$state->rows());
}
}

View File

@ -0,0 +1,46 @@
<?php
namespace ClickHouseDB\Tests;
use PHPUnit\Framework\TestCase;
/**
* Class ProgressAndEscapeTest
* @group ProgressAndEscapeTest
* @package ClickHouseDB\Tests
*/
final class ProgressAndEscapeTest extends TestCase
{
use WithClient;
/**
* @throws Exception
*/
public function setUp()
{
date_default_timezone_set('Europe/Moscow');
$this->client->ping();
}
public function testProgressFunction()
{
global $resultTest;
$this->client->settings()->set('max_block_size', 1);
$this->client->progressFunction(function ($data) {
global $resultTest;
$resultTest=$data;
});
$st=$this->client->select('SELECT number,sleep(0.1) FROM system.numbers limit 4');
// read_rows + read_bytes + total_rows
$this->assertArrayHasKey('read_rows',$resultTest);
$this->assertArrayHasKey('read_bytes',$resultTest);
$this->assertArrayHasKey('total_rows',$resultTest);
$this->assertGreaterThan(3,$resultTest['read_rows']);
$this->assertGreaterThan(3,$resultTest['read_bytes']);
}
}

View File

@ -0,0 +1,100 @@
<?php
namespace ClickHouseDB\Tests;
use ClickHouseDB\Exception\DatabaseException;
use PHPUnit\Framework\TestCase;
/**
* Class ClientTest
* @group ClientTest
*/
final class SessionsTest extends TestCase
{
use WithClient;
/**
* @throws Exception
*/
public function setUp()
{
date_default_timezone_set('Europe/Moscow');
$this->client->ping();
}
public function testCreateTableTEMPORARYNoSession()
{
$this->expectException(DatabaseException::class);
$this->client->write('DROP TABLE IF EXISTS phpunti_test_xxxx');
$this->client->write('
CREATE TEMPORARY TABLE IF NOT EXISTS phpunti_test_xxxx (
event_date Date DEFAULT toDate(event_time),
event_time DateTime,
url_hash String,
site_id Int32,
views Int32
) ENGINE = TinyLog
');
}
public function testUseSession()
{
$this->assertFalse($this->client->getSession());
$this->client->useSession();
$this->assertStringMatchesFormat('%s',$this->client->getSession());
}
public function testCreateTableTEMPORARYWithSessions()
{
// make two session tables
$table_name_A = 'phpunti_test_A_abcd_' . time();
$table_name_B = 'phpunti_test_B_abcd_' . time();
// make new session id
$A_Session_ID = $this->client->useSession()->getSession();
// create table in session A
$this->client->write(' CREATE TEMPORARY TABLE IF NOT EXISTS ' . $table_name_A . ' (number UInt64)');
$this->client->write('INSERT INTO ' . $table_name_A . ' SELECT number FROM system.numbers LIMIT 30');
$st = $this->client->select('SELECT round(avg(number),1) as avs FROM ' . $table_name_A);
// check
$this->assertEquals(14.5, $st->fetchOne('avs'));
// reconnect + reinit session
// create table in session B
$B_Session_ID = $this->client->useSession()->getSession();
$this->client->write(' CREATE TEMPORARY TABLE IF NOT EXISTS ' . $table_name_B . ' (number UInt64)');
$this->client->write('INSERT INTO ' . $table_name_B . ' SELECT number*1234 FROM system.numbers LIMIT 30');
$st = $this->client->select('SELECT round(avg(number),1) as avs FROM ' . $table_name_B);
// check
$this->assertEquals(17893, $st->fetchOne('avs'));
// Reuse session A
$this->client->useSession($A_Session_ID);
$st = $this->client->select('SELECT round(avg(number),1) as avs FROM ' . $table_name_A);
$this->assertEquals(14.5, $st->fetchOne('avs'));
// Reuse session B
$this->client->useSession($B_Session_ID);
$st = $this->client->select('SELECT round(avg(number),1) as avs FROM ' . $table_name_B);
// check
$this->assertEquals(17893, $st->fetchOne('avs'));
}
}

View File

@ -0,0 +1,72 @@
<?php
namespace ClickHouseDB\Tests;
use ClickHouseDB\Exception\QueryException;
use PHPUnit\Framework\TestCase;
/**
* Class StreamTest
* @group Stream
* @package ClickHouseDB\Tests
*/
final class StreamTest extends TestCase
{
use WithClient;
public function testStreamRead()
{
$stream = fopen('php://memory','r+');
$streamRead=new \ClickHouseDB\Transport\StreamRead($stream);
$callable = function ($ch, $string) use ($stream) {
// data arrives in blocks; rewrite "sin" to "max" on the fly to prove the callback ran
fwrite($stream, str_ireplace('"sin"','"max"',$string));
return strlen($string);
};
$streamRead->closure($callable);
$state=$this->client->streamRead($streamRead,'SELECT sin(number) as sin,cos(number) as cos FROM {table_name} LIMIT 2 FORMAT JSONEachRow', ['table_name'=>'system.numbers']);
rewind($stream);
$bufferCheck='';
while (($buffer = fgets($stream, 4096)) !== false) {
$bufferCheck=$bufferCheck.$buffer;
}
fclose($stream);
$checkString='{"max":0,"cos":1}';
$this->assertContains($checkString,$bufferCheck);
}
public function testStreamInsert()
{
$this->client->write('DROP TABLE IF EXISTS _phpCh_SteamTest');
$this->client->write('CREATE TABLE _phpCh_SteamTest (a Int32) Engine=Log');
$stream = fopen('php://memory','r+');
for($f=0;$f<121123;$f++)
fwrite($stream, json_encode(['a'=>$f]).PHP_EOL );
rewind($stream);
$streamWrite=new \ClickHouseDB\Transport\StreamWrite($stream);
$streamWrite->applyGzip();
$callable = function ($ch, $fd, $length) use ($stream) {
return ($line = fread($stream, $length)) ? $line : '';
};
$streamWrite->closure($callable);
$state=$this->client->streamWrite($streamWrite,'INSERT INTO {table_name} FORMAT JSONEachRow', ['table_name'=>'_phpCh_SteamTest']);
$sum=$this->client->select("SELECT sum(a) as s FROM _phpCh_SteamTest ")->fetchOne('s');
$this->assertEquals(7335330003, $sum);
}
}

View File

@ -0,0 +1,71 @@
<?php
declare(strict_types=1);
namespace ClickHouseDB\Tests;
use ClickHouseDB\Quote\StrictQuoteLine;
use PHPUnit\Framework\TestCase;
use function array_diff;
use function array_map;
use function file_put_contents;
use function unlink;
use const FILE_APPEND;
class StrictQuoteLineTest extends TestCase
{
use WithClient;
/**
* @return void
*/
public function setUp()
{
$this->client->write('DROP TABLE IF EXISTS cities');
$this->client->write('
CREATE TABLE IF NOT EXISTS cities (
date Date,
city String,
keywords Array(String),
nums Array(UInt8)
) ENGINE = MergeTree(date, (date), 8192)
');
parent::setUp();
}
/**
* @group test
*
* @return void
*/
public function testQuoteValueCSV()
{
$strict = new StrictQuoteLine('CSV');
$rows = [
['2018-04-01', '"That works"', ['\"That does not\"', 'That works'], [8, 7]],
['2018-04-02', 'That works', ['\""That does not\""', '"\'\""That works"""\"'], [1, 0]],
['2018-04-03', 'That works', ['\"\"That does not"\'""', '""""That works""""'], [9, 121]],
];
$fileName = $this->tmpPath . '__test_quote_value.csv';
@unlink($fileName);
foreach ($rows as $row) {
file_put_contents($fileName, $strict->quoteRow($row) . "\n", FILE_APPEND);
}
$this->client->insertBatchFiles('cities', [$fileName], ['date', 'city', 'keywords', 'nums']);
$statement = $this->client->select('SELECT * FROM cities');
$result = array_map('array_values', $statement->rows());
foreach ($result as $key => $value) {
// check correct quote string
$this->assertEmpty(array_diff($rows[$key][2], $value[2]));
$this->assertEmpty(array_diff($rows[$key][3], $value[3]));
}
$rows[0][2][1] = 'Not the same string';
$this->assertCount(1, array_diff($rows[0][2], $result[0][2]));
}
}

View File

@ -0,0 +1,51 @@
<?php
namespace ClickHouseDB\Tests;
use ClickHouseDB\Exception\QueryException;
use PHPUnit\Framework\TestCase;
/**
* Class TableSizeTest
* @group TableSize
* @package ClickHouseDB\Tests
*/
final class TableSizeTest extends TestCase
{
use WithClient;
public function testPrepareManyRowFail()
{
// make two session tables
$table_name_A = 'phpunti_test_A_ab11cd_' . time();
$table_name_B = 'phpunti_test_B_ab22cd_' . time();
// create table in session A
$this->client->write(' DROP TABLE IF EXISTS ' . $table_name_A . ' ; ');
$this->client->write(' DROP TABLE IF EXISTS ' . $table_name_B . ' ; ');
$this->client->write(' CREATE TABLE ' . $table_name_A . ' (number UInt64) ENGINE = Log;');
$this->client->write(' CREATE TABLE ' . $table_name_B . ' (number UInt64) ENGINE = Log;');
$this->client->write(' INSERT INTO ' . $table_name_A . ' SELECT number FROM system.numbers LIMIT 30');
$this->client->write(' INSERT INTO ' . $table_name_B . ' SELECT number FROM system.numbers LIMIT 30');
$size=$this->client->tablesSize();
$this->assertArrayHasKey($table_name_A, $size);
$this->assertArrayHasKey($table_name_B, $size);
$size=$this->client->tableSize($table_name_A);
$this->assertArrayHasKey('table', $size);
$this->assertArrayHasKey('database', $size);
$this->assertArrayHasKey('sizebytes', $size);
$this->assertArrayHasKey('size', $size);
$this->assertArrayHasKey('min_date', $size);
$this->assertArrayHasKey('max_date', $size);
$this->client->write(' DROP TABLE IF EXISTS ' . $table_name_A . ' ; ');
$this->client->write(' DROP TABLE IF EXISTS ' . $table_name_B . ' ; ');
}
}

View File

@ -0,0 +1,85 @@
<?php
declare(strict_types=1);
namespace ClickHouseDB\Tests\Type;
use ClickHouseDB\Tests\WithClient;
use ClickHouseDB\Type\UInt64;
use DateTimeImmutable;
use PHPUnit\Framework\TestCase;
use function array_column;
use function implode;
use function sprintf;
/**
* @group integration
*/
final class UInt64Test extends TestCase
{
use WithClient;
/**
* @return void
*/
public function setUp()
{
$this->client->write('DROP TABLE IF EXISTS uint64_data');
$this->client->write('
CREATE TABLE IF NOT EXISTS uint64_data (
date Date MATERIALIZED toDate(datetime),
datetime DateTime,
number UInt64
)
ENGINE = MergeTree
PARTITION BY date
ORDER BY (datetime);
');
parent::setUp();
}
/**
* @return void
*/
public function testWriteInsert()
{
$this->client->write(sprintf(
'INSERT INTO uint64_data VALUES %s',
implode(
',',
[
sprintf('(now(), %s)', UInt64::fromString('0')),
sprintf('(now(), %s)', UInt64::fromString('1')),
sprintf('(now(), %s)', UInt64::fromString('18446744073709551615')),
]
)
));
$statement = $this->client->select('SELECT number FROM uint64_data ORDER BY number ASC');
self::assertSame(3, $statement->count());
self::assertSame(['0', '1', '18446744073709551615'], array_column($statement->rows(), 'number'));
}
/**
* @return void
*/
public function testInsert()
{
$now = new DateTimeImmutable();
$this->client->insert(
'uint64_data',
[
[$now, UInt64::fromString('0')],
[$now, UInt64::fromString('1')],
[$now, UInt64::fromString('18446744073709551615')],
]
);
$statement = $this->client->select('SELECT number FROM uint64_data ORDER BY number ASC');
self::assertSame(3, $statement->count());
self::assertSame(['0', '1', '18446744073709551615'], array_column($statement->rows(), 'number'));
}
}

View File

@ -0,0 +1,79 @@
<?php
namespace ClickHouseDB\Tests;
use ClickHouseDB\Exception\QueryException;
use PHPUnit\Framework\TestCase;
/**
* Class UriTest
* @group Uri
* @package ClickHouseDB\Tests
*/
final class UriTest extends TestCase
{
use WithClient;
public function testUriMake()
{
$config = [
'host' => '11.12.13.14',
'port' => 8123,
'username' => 'uu',
'password' => 'pp',
];
$cli = new \ClickHouseDB\Client($config);
//
$this->assertEquals('http://11.12.13.14:8123' , $cli->transport()->getUri());
$cli->https(true);
$this->assertEquals('https://11.12.13.14:8123' , $cli->transport()->getUri());
$config['host']='blabla.com';
$cli = new \ClickHouseDB\Client($config);
$cli->https(true);
$this->assertEquals('https://blabla.com:8123' , $cli->transport()->getUri());
$config['host']='blabla.com:8111';
$cli = new \ClickHouseDB\Client($config);
$this->assertEquals('http://blabla.com:8111' , $cli->transport()->getUri());
$config['host']='blabla.com/urls';
$cli = new \ClickHouseDB\Client($config);
$this->assertEquals('http://blabla.com/urls' , $cli->transport()->getUri());
$config['host']='blabla.com';
$config['port']=0;
$cli = new \ClickHouseDB\Client($config);
$this->assertEquals('http://blabla.com' , $cli->transport()->getUri());
$config['host']='blabla.com';
$config['port']=false;
$cli = new \ClickHouseDB\Client($config);
$this->assertEquals('http://blabla.com' , $cli->transport()->getUri());
$config['host']='blabla.com:8222/path1/path';
$config['port']=false;
$cli = new \ClickHouseDB\Client($config);
$this->assertEquals('http://blabla.com:8222/path1/path' , $cli->transport()->getUri());
$config['host']='blabla.com:1234/path1/path';
$config['port']=3344;
$cli = new \ClickHouseDB\Client($config);
$this->assertEquals('http://blabla.com:1234/path1/path' , $cli->transport()->getUri());
// re-create the client after altering its config
$this->restartClickHouseClient();
}
}

View File

@ -0,0 +1,50 @@
<?php
declare(strict_types=1);
namespace ClickHouseDB\Tests;
use ClickHouseDB\Client;
use function getenv;
use function sprintf;
trait WithClient
{
/** @var Client */
private $client;
private $tmpPath;
/**
* @before
*/
public function setupClickHouseClient()
{
$this->restartClickHouseClient();
$this->tmpPath = getenv('CLICKHOUSE_TMPPATH') . DIRECTORY_SEPARATOR;
}
public function restartClickHouseClient()
{
$config = [
'host' => getenv('CLICKHOUSE_HOST'),
'port' => getenv('CLICKHOUSE_PORT'),
'username' => getenv('CLICKHOUSE_USER'),
'password' => getenv('CLICKHOUSE_PASSWORD'),
];
$this->client = new Client($config);
$databaseName = getenv('CLICKHOUSE_DATABASE');
if (!$databaseName || $databaseName==='default') {
throw new \Exception('Change CLICKHOUSE_DATABASE, not use default');
}
if (empty($GLOBALS['phpCH_needFirstCreateDB'])) { // hack: global flag so the test database is (re)created only once per run
$GLOBALS['phpCH_needFirstCreateDB']=true;
$this->client->write(sprintf('DROP DATABASE IF EXISTS "%s"', $databaseName));
$this->client->write(sprintf('CREATE DATABASE "%s"', $databaseName));
}
// Change Database
$this->client->database($databaseName);
}
}
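A test class picks up the shared client simply by using the trait; a minimal sketch (class name and assertion are illustrative):
<?php
namespace ClickHouseDB\Tests;

use PHPUnit\Framework\TestCase;

final class ExampleTest extends TestCase
{
    use WithClient;   // provides $this->client, connected and pointed at CLICKHOUSE_DATABASE

    public function testPing()
    {
        $this->assertTrue($this->client->ping());
    }
}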

View File

@ -0,0 +1,13 @@
version: '3'
services:
clickhouse-server:
image: yandex/clickhouse-server
hostname: clickhouse
container_name: clickhouse
ports:
- 9000:9000
- 8123:8123
ulimits:
nofile:
soft: 262144
hard: 262144

View File

@ -103,6 +103,7 @@ define('DB_DB2', 5);
define('DB_FIREBIRD', 6);
define('DB_INFORMIX', 7);
define('DB_SQLITE', 8);
define('DB_ClickHouse', 9);
// ---
// --- Define supported AUTH Methods

View File

@ -366,6 +366,16 @@ $dbmapping['monitorware']['DBMAPPINGS'][MISC_SYSTEMID] = "SystemID";
$dbmapping['monitorware']['DBMAPPINGS'][MISC_CHECKSUM] = "Checksum";
//$dbmapping['monitorware']['DBMAPPINGS'][SYSLOG_PROCESSID] = "ProcessID";
// --- Default ClickHouse Mapping
$dbmapping['clickhouse']['ID'] = "clickhouse";
$dbmapping['clickhouse']['DisplayName'] = "ClickHouse";
$dbmapping['clickhouse']['DBMAPPINGS'][SYSLOG_UID] = "ID";
$dbmapping['clickhouse']['DBMAPPINGS'][SYSLOG_MESSAGE] = "Message";
$dbmapping['clickhouse']['DBMAPPINGS'][SYSLOG_FACILITY] = "Facility";
$dbmapping['clickhouse']['DBMAPPINGS'][SYSLOG_SEVERITY] = "Severity";
$dbmapping['clickhouse']['DBMAPPINGS'][SYSLOG_SYSLOGTAG] = "tag";
$dbmapping['syslogng']['ID'] = "syslogng";
$dbmapping['syslogng']['DisplayName'] = "SyslogNG";
$dbmapping['syslogng']['DBMAPPINGS'][SYSLOG_UID] = "seq";
@ -404,4 +414,4 @@ define('EVTIME_TIMESTAMP', '0');
define('EVTIME_TIMEZONE', '1');
define('EVTIME_MICROSECONDS', '2');
?>
?>
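The ClickHouse mapping above covers only a handful of syslog properties; further fields would be added the same way. A sketch (the column names are assumptions and must match the actual ClickHouse table schema):
$dbmapping['clickhouse']['DBMAPPINGS'][SYSLOG_PROCESSID] = "ProcessID"; // assumed column name
$dbmapping['clickhouse']['DBMAPPINGS'][MISC_CHECKSUM] = "Checksum";     // assumed column name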

View File

@ -183,7 +183,7 @@ function InitSource(&$mysource)
if ( isset($mysource['DBType']) )
$mysource['ObjRef']->DBType = $mysource['DBType'];
else
$mysource['ObjRef']->DBType = DB_MYSQL;
$mysource['ObjRef']->DBType = DB_ClickHouse;
$mysource['ObjRef']->DBTableName = $mysource['DBTableName'];

View File

@ -243,7 +243,7 @@ function ConvertCustomSources()
else // Force to number
$mySource['DBEnableRowCounting'] = intval($mySource['DBEnableRowCounting']);
if ( !isset($mySource['DBType']) )
$mySource['DBType'] = DB_MYSQL;
$mySource['DBType'] = DB_ClickHouse;
// Perform the insert
$result = DB_Query("INSERT INTO `" . DB_SOURCES . "` (Name, Description, SourceType, MsgParserList, MsgNormalize, ViewID, DBTableType, DBType, DBServer, DBName, DBUser, DBPassword, DBTableName, DBEnableRowCounting) VALUES ( " .