diff --git a/fhem/CHANGED b/fhem/CHANGED index 560680f04..e8f4706cd 100644 --- a/fhem/CHANGED +++ b/fhem/CHANGED @@ -1,5 +1,7 @@ # Add changes at the top of the list. Keep it in ASCII, and 80-char wide. # Do not insert empty lines here, update check depends on it. + - bugfix: 93_DbRep: bugfix group by-clause due to incompatible changes made + in MySQL 5.7.5 (Forum #msg541103) - feature: 66_ECMD, 67_ECMDDevice: - if split is used, the strings at which the messages are split are still part of the messages diff --git a/fhem/FHEM/93_DbRep.pm b/fhem/FHEM/93_DbRep.pm index 8259f1709..62d2cfc6d 100644 --- a/fhem/FHEM/93_DbRep.pm +++ b/fhem/FHEM/93_DbRep.pm @@ -40,6 +40,11 @@ ########################################################################################################### # Versions History: # +# 4.8.6 17.12.2016 new bugfix group by-clause due to incompatible changes made in MySQL 5.7.5 +# (Forum #msg541103) +# 4.8.5 16.12.2016 bugfix group by-clause due to Forum #msg540610 +# 4.8.4 13.12.2016 added "group by ...,table_schema" to select in dbmeta_DoParse due to Forum #msg539228, +# commandref adapted, changed "not_enough_data_in_period" to "less_data_in_period" # 4.8.3 12.12.2016 balance diff to next period if value of period is 0 between two periods with # values # 4.8.2 10.12.2016 bugfix negativ diff if balanced @@ -161,7 +166,7 @@ use Blocking; use Time::Local; # no if $] >= 5.017011, warnings => 'experimental'; -my $DbRepVersion = "4.8.3"; +my $DbRepVersion = "4.8.6"; my %dbrep_col = ("DEVICE" => 64, "TYPE" => 64, @@ -549,9 +554,9 @@ sub DbRep_Attr($$$$) { if ($aName eq "reading" || $aName eq "device") { if ($dbmodel && $dbmodel ne 'SQLITE') { if ($dbmodel eq 'POSTGRESQL') { - return "Length of \"$aName\" is too big. Maximum lenth for database type $dbmodel is $dbrep_col{READING}" if(length($aVal) > $dbrep_col{READING}); + return "Length of \"$aName\" is too big. 
Maximum length for database type $dbmodel is $dbrep_col{READING}" if(length($aVal) > $dbrep_col{READING}); } elsif ($dbmodel eq 'MYSQL') { - return "Length of \"$aName\" is too big. Maximum lenth for database type $dbmodel is $dbrep_col{READING}" if(length($aVal) > $dbrep_col{READING}); + return "Length of \"$aName\" is too big. Maximum length for database type $dbmodel is $dbrep_col{READING}" if(length($aVal) > $dbrep_col{READING}); } } } @@ -2092,7 +2097,7 @@ sub diffval_ParseDone($) { readingsBulkUpdate($hash, "background_processing_time", sprintf("%.4f",$brt)) if(AttrVal($name, "showproctime", undef)); readingsBulkUpdate($hash, "sql_processing_time", sprintf("%.4f",$rt)) if(AttrVal($name, "showproctime", undef)); readingsBulkUpdate($hash, "diff-overrun_limit-".$difflimit, $rowsrej) if($rowsrej); - readingsBulkUpdate($hash, "not_enough_data_in_period", $ncpstr) if($ncpstr); + readingsBulkUpdate($hash, "less_data_in_period", $ncpstr) if($ncpstr); readingsBulkUpdate($hash, "state", ($ncpstr||$rowsrej)?"Warning":"done"); readingsEndUpdate($hash, 1); @@ -3164,6 +3169,18 @@ sub dbmeta_DoParse($) { my $sth; my $sql; + # due to incompatible changes made in MySQL 5.7.5, see http://johnemb.blogspot.de/2014/09/adding-or-removing-individual-sql-modes.html + if($dbmodel eq "MYSQL") { + eval {$dbh->do("SET sql_mode=(SELECT REPLACE(\@\@sql_mode,'ONLY_FULL_GROUP_BY',''));");}; + } + if ($@) { + $err = encode_base64($@,""); + Log3 ($name, 2, "DbRep $name - $@"); + $dbh->disconnect; + Log3 ($name, 4, "DbRep $name -> BlockingCall dbmeta_DoParse finished"); + return "$name|''|''|''|$err"; + } + if ($opt ne "svrinfo") { foreach my $ple (@parlist) { if ($opt eq "dbvars") { @@ -3181,7 +3198,7 @@ engine, table_type, create_time - from information_schema.tables group by table_name;"; + from information_schema.tables group by 1;"; } Log3($name, 4, "DbRep $name - SQL execute: $sql"); @@ -3207,7 +3224,7 @@ $param =~ tr/%//d; if($line[0] =~ 
m/($param)/i) { push(@row_array, $line[0].".table_schema ".$line[1]); - push(@row_array, $line[0].".data_index_lenth_MB ".$line[2]); + push(@row_array, $line[0].".data_index_length_MB ".$line[2]); push(@row_array, $line[0].".table_name ".$line[1]); push(@row_array, $line[0].".data_free_MB ".$line[3]); push(@row_array, $line[0].".row_format ".$line[4]); @@ -3753,7 +3770,7 @@ return; # If "Value=0" has to be inserted, use "Value = 0.0" to do it.

example: 2016-08-01,23:00:09,TestValue,TestUnit
- # field lenth is maximum 32 (MYSQL) / 64 (POSTGRESQL) characters long, Spaces are NOT allowed in fieldvalues !
+ # field length is maximum 32 (MYSQL) / 64 (POSTGRESQL) characters long, Spaces are NOT allowed in fieldvalues !

Note:
@@ -3792,15 +3809,22 @@ return; The reading to evaluate must be defined using attribute "reading". This function is mostly reasonable if readingvalues are increasing permanently and don't write value-differences to the database. The difference will be generated from the first available dataset (VALUE-Field) to the last available dataset between the - specified time linits/aggregation. + specified time limits/aggregation, in which a balanced difference value of the previous aggregation period will be transferred to the + following aggregation period in case this period contains a value.
An possible counter overrun (restart with value "0") will be considered (compare attribute "diffAccept").
- If only one dataset will be found within the evalution period, no difference can be calculated - and the reading "not_enough_data_in_period" with a list of concerned periods will be generated in that case.

+ If only one dataset will be found within the evaluation period, the difference can be calculated only in combination with the balanced + difference of the previous aggregation period. In this case a logical inaccuracy regarding the assignment of the difference to the particular aggregation period + can be possible. Hence a warning in "state" will be placed and the reading "less_data_in_period" with a list of periods + with only one dataset found in it will be created.

+ + + no difference can be calculated + and the reading "less_data_in_period" with a list of concerned periods will be generated in that case.